1// Copyright 2006-2008 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
33#include "zone-inl.h"
34
35
36namespace v8 {
37namespace internal {
38
39// Defines all the roots in Heap.
40#define UNCONDITIONAL_STRONG_ROOT_LIST(V) \
41 /* Cluster the most popular ones in a few cache lines here at the top. */ \
42 V(Smi, stack_limit, StackLimit) \
43 V(Object, undefined_value, UndefinedValue) \
44 V(Object, the_hole_value, TheHoleValue) \
45 V(Object, null_value, NullValue) \
46 V(Object, true_value, TrueValue) \
47 V(Object, false_value, FalseValue) \
48 V(Map, heap_number_map, HeapNumberMap) \
49 V(Map, global_context_map, GlobalContextMap) \
50 V(Map, fixed_array_map, FixedArrayMap) \
51 V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
52 V(Map, meta_map, MetaMap) \
53 V(Object, termination_exception, TerminationException) \
54 V(Map, hash_table_map, HashTableMap) \
55 V(FixedArray, empty_fixed_array, EmptyFixedArray) \
56 V(Map, short_string_map, ShortStringMap) \
57 V(Map, medium_string_map, MediumStringMap) \
58 V(Map, long_string_map, LongStringMap) \
59 V(Map, short_ascii_string_map, ShortAsciiStringMap) \
60 V(Map, medium_ascii_string_map, MediumAsciiStringMap) \
61 V(Map, long_ascii_string_map, LongAsciiStringMap) \
62 V(Map, short_symbol_map, ShortSymbolMap) \
63 V(Map, medium_symbol_map, MediumSymbolMap) \
64 V(Map, long_symbol_map, LongSymbolMap) \
65 V(Map, short_ascii_symbol_map, ShortAsciiSymbolMap) \
66 V(Map, medium_ascii_symbol_map, MediumAsciiSymbolMap) \
67 V(Map, long_ascii_symbol_map, LongAsciiSymbolMap) \
68 V(Map, short_cons_symbol_map, ShortConsSymbolMap) \
69 V(Map, medium_cons_symbol_map, MediumConsSymbolMap) \
70 V(Map, long_cons_symbol_map, LongConsSymbolMap) \
71 V(Map, short_cons_ascii_symbol_map, ShortConsAsciiSymbolMap) \
72 V(Map, medium_cons_ascii_symbol_map, MediumConsAsciiSymbolMap) \
73 V(Map, long_cons_ascii_symbol_map, LongConsAsciiSymbolMap) \
74 V(Map, short_sliced_symbol_map, ShortSlicedSymbolMap) \
75 V(Map, medium_sliced_symbol_map, MediumSlicedSymbolMap) \
76 V(Map, long_sliced_symbol_map, LongSlicedSymbolMap) \
77 V(Map, short_sliced_ascii_symbol_map, ShortSlicedAsciiSymbolMap) \
78 V(Map, medium_sliced_ascii_symbol_map, MediumSlicedAsciiSymbolMap) \
79 V(Map, long_sliced_ascii_symbol_map, LongSlicedAsciiSymbolMap) \
80 V(Map, short_external_symbol_map, ShortExternalSymbolMap) \
81 V(Map, medium_external_symbol_map, MediumExternalSymbolMap) \
82 V(Map, long_external_symbol_map, LongExternalSymbolMap) \
83 V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \
84 V(Map, medium_external_ascii_symbol_map, MediumExternalAsciiSymbolMap) \
85 V(Map, long_external_ascii_symbol_map, LongExternalAsciiSymbolMap) \
86 V(Map, short_cons_string_map, ShortConsStringMap) \
87 V(Map, medium_cons_string_map, MediumConsStringMap) \
88 V(Map, long_cons_string_map, LongConsStringMap) \
89 V(Map, short_cons_ascii_string_map, ShortConsAsciiStringMap) \
90 V(Map, medium_cons_ascii_string_map, MediumConsAsciiStringMap) \
91 V(Map, long_cons_ascii_string_map, LongConsAsciiStringMap) \
92 V(Map, short_sliced_string_map, ShortSlicedStringMap) \
93 V(Map, medium_sliced_string_map, MediumSlicedStringMap) \
94 V(Map, long_sliced_string_map, LongSlicedStringMap) \
95 V(Map, short_sliced_ascii_string_map, ShortSlicedAsciiStringMap) \
96 V(Map, medium_sliced_ascii_string_map, MediumSlicedAsciiStringMap) \
97 V(Map, long_sliced_ascii_string_map, LongSlicedAsciiStringMap) \
98 V(Map, short_external_string_map, ShortExternalStringMap) \
99 V(Map, medium_external_string_map, MediumExternalStringMap) \
100 V(Map, long_external_string_map, LongExternalStringMap) \
101 V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
102 V(Map, medium_external_ascii_string_map, MediumExternalAsciiStringMap) \
103 V(Map, long_external_ascii_string_map, LongExternalAsciiStringMap) \
104 V(Map, undetectable_short_string_map, UndetectableShortStringMap) \
105 V(Map, undetectable_medium_string_map, UndetectableMediumStringMap) \
106 V(Map, undetectable_long_string_map, UndetectableLongStringMap) \
107 V(Map, undetectable_short_ascii_string_map, UndetectableShortAsciiStringMap) \
108 V(Map, \
109 undetectable_medium_ascii_string_map, \
110 UndetectableMediumAsciiStringMap) \
111 V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \
112 V(Map, byte_array_map, ByteArrayMap) \
113 V(Map, pixel_array_map, PixelArrayMap) \
114 V(Map, external_byte_array_map, ExternalByteArrayMap) \
115 V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
116 V(Map, external_short_array_map, ExternalShortArrayMap) \
117 V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
118 V(Map, external_int_array_map, ExternalIntArrayMap) \
119 V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
120 V(Map, external_float_array_map, ExternalFloatArrayMap) \
121 V(Map, context_map, ContextMap) \
122 V(Map, catch_context_map, CatchContextMap) \
123 V(Map, code_map, CodeMap) \
124 V(Map, oddball_map, OddballMap) \
125 V(Map, global_property_cell_map, GlobalPropertyCellMap) \
126 V(Map, boilerplate_function_map, BoilerplateFunctionMap) \
127 V(Map, shared_function_info_map, SharedFunctionInfoMap) \
128 V(Map, proxy_map, ProxyMap) \
129 V(Map, one_pointer_filler_map, OnePointerFillerMap) \
130 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
131 V(Object, nan_value, NanValue) \
132 V(Object, minus_zero_value, MinusZeroValue) \
133 V(String, empty_string, EmptyString) \
134 V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
135 V(Map, neander_map, NeanderMap) \
136 V(JSObject, message_listeners, MessageListeners) \
137 V(Proxy, prototype_accessors, PrototypeAccessors) \
138 V(NumberDictionary, code_stubs, CodeStubs) \
139 V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
140 V(Code, js_entry_code, JsEntryCode) \
141 V(Code, js_construct_entry_code, JsConstructEntryCode) \
142 V(Code, c_entry_code, CEntryCode) \
143 V(Code, c_entry_debug_break_code, CEntryDebugBreakCode) \
144 V(FixedArray, number_string_cache, NumberStringCache) \
145 V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
146 V(FixedArray, natives_source_cache, NativesSourceCache) \
147 V(Object, last_script_id, LastScriptId) \
148
149#if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
150#define STRONG_ROOT_LIST(V) \
151 UNCONDITIONAL_STRONG_ROOT_LIST(V) \
152 V(Code, re_c_entry_code, RegExpCEntryCode)
153#else
154#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
155#endif
156
157#define ROOT_LIST(V) \
158 STRONG_ROOT_LIST(V) \
159 V(SymbolTable, symbol_table, SymbolTable)
160
161#define SYMBOL_LIST(V) \
162 V(Array_symbol, "Array") \
163 V(Object_symbol, "Object") \
164 V(Proto_symbol, "__proto__") \
165 V(StringImpl_symbol, "StringImpl") \
166 V(arguments_symbol, "arguments") \
167 V(Arguments_symbol, "Arguments") \
168 V(arguments_shadow_symbol, ".arguments") \
169 V(call_symbol, "call") \
170 V(apply_symbol, "apply") \
171 V(caller_symbol, "caller") \
172 V(boolean_symbol, "boolean") \
173 V(Boolean_symbol, "Boolean") \
174 V(callee_symbol, "callee") \
175 V(constructor_symbol, "constructor") \
176 V(code_symbol, ".code") \
177 V(result_symbol, ".result") \
178 V(catch_var_symbol, ".catch-var") \
179 V(empty_symbol, "") \
180 V(eval_symbol, "eval") \
181 V(function_symbol, "function") \
182 V(length_symbol, "length") \
183 V(name_symbol, "name") \
184 V(number_symbol, "number") \
185 V(Number_symbol, "Number") \
186 V(RegExp_symbol, "RegExp") \
187 V(object_symbol, "object") \
188 V(prototype_symbol, "prototype") \
189 V(string_symbol, "string") \
190 V(String_symbol, "String") \
191 V(Date_symbol, "Date") \
192 V(this_symbol, "this") \
193 V(to_string_symbol, "toString") \
194 V(char_at_symbol, "CharAt") \
195 V(undefined_symbol, "undefined") \
196 V(value_of_symbol, "valueOf") \
197 V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
198 V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
199 V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
200 V(illegal_access_symbol, "illegal access") \
201 V(out_of_memory_symbol, "out-of-memory") \
202 V(illegal_execution_state_symbol, "illegal execution state") \
203 V(get_symbol, "get") \
204 V(set_symbol, "set") \
205 V(function_class_symbol, "Function") \
206 V(illegal_argument_symbol, "illegal argument") \
207 V(MakeReferenceError_symbol, "MakeReferenceError") \
208 V(MakeSyntaxError_symbol, "MakeSyntaxError") \
209 V(MakeTypeError_symbol, "MakeTypeError") \
210 V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
211 V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
212 V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
213 V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
214 V(illegal_return_symbol, "illegal_return") \
215 V(illegal_break_symbol, "illegal_break") \
216 V(illegal_continue_symbol, "illegal_continue") \
217 V(unknown_label_symbol, "unknown_label") \
218 V(redeclaration_symbol, "redeclaration") \
219 V(failure_symbol, "<failure>") \
220 V(space_symbol, " ") \
221 V(exec_symbol, "exec") \
222 V(zero_symbol, "0") \
223 V(global_eval_symbol, "GlobalEval") \
224 V(identity_hash_symbol, "v8::IdentityHash")
225
226
227// Forward declaration of the GCTracer class.
228class GCTracer;
229
230
231// The all-static Heap class captures the interface to the global object heap.
232// All JavaScript contexts in this process share the same object heap.
233
234class Heap : public AllStatic {
235 public:
236 // Configure heap size before setup. Returns false if the heap has already
237 // been set up.
238 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
239 static bool ConfigureHeapDefault();
240
241 // Initializes the global object heap. If create_heap_objects is true,
242 // also creates the basic non-mutable objects.
243 // Returns whether it succeeded.
244 static bool Setup(bool create_heap_objects);
245
246 // Destroys all memory allocated by the heap.
247 static void TearDown();
248
249 // Sets the stack limit in the roots_ array. Some architectures generate code
250 // that looks here, because it is faster than loading from the static jslimit_
251 // variable.
252 static void SetStackLimit(intptr_t limit);
253
254 // Returns whether Setup has been called.
255 static bool HasBeenSetup();
256
257 // Returns the maximum amount of memory reserved for the heap. For
258 // the young generation, we reserve 4 times the amount needed for a
259 // semi space. The young generation consists of two semi spaces and
260 // we reserve twice the amount needed for those in order to ensure
261 // that new space can be aligned to its size.
262 static int MaxReserved() {
263 return 4 * reserved_semispace_size_ + max_old_generation_size_;
264 }
265 static int MaxSemiSpaceSize() { return max_semispace_size_; }
266 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
267 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
268 static int MaxOldGenerationSize() { return max_old_generation_size_; }
269
270 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
271 // more spaces are needed until it reaches the limit.
272 static int Capacity();
273
274 // Returns the amount of memory currently committed for the heap.
275 static int CommittedMemory();
276
277 // Returns the available bytes in space w/o growing.
278 // Heap doesn't guarantee that it can allocate an object that requires
279 // all available bytes. Check MaxHeapObjectSize() instead.
280 static int Available();
281
282 // Returns the maximum object size in paged space.
283 static inline int MaxObjectSizeInPagedSpace();
284
285 // Returns the size of all objects residing in the heap.
286 static int SizeOfObjects();
287
288 // Return the starting address and a mask for the new space. And-masking an
289 // address with the mask will result in the start address of the new space
290 // for all addresses in either semispace.
291 static Address NewSpaceStart() { return new_space_.start(); }
292 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
293 static Address NewSpaceTop() { return new_space_.top(); }
294
295 static NewSpace* new_space() { return &new_space_; }
296 static OldSpace* old_pointer_space() { return old_pointer_space_; }
297 static OldSpace* old_data_space() { return old_data_space_; }
298 static OldSpace* code_space() { return code_space_; }
299 static MapSpace* map_space() { return map_space_; }
300 static CellSpace* cell_space() { return cell_space_; }
301 static LargeObjectSpace* lo_space() { return lo_space_; }
302
303 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
304 static Address always_allocate_scope_depth_address() {
305 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
306 }
307
308 static Address* NewSpaceAllocationTopAddress() {
309 return new_space_.allocation_top_address();
310 }
311 static Address* NewSpaceAllocationLimitAddress() {
312 return new_space_.allocation_limit_address();
313 }
314
315 // Uncommit unused semi space.
316 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
317
318#ifdef ENABLE_HEAP_PROTECTION
319 // Protect/unprotect the heap by marking all spaces read-only/writable.
320 static void Protect();
321 static void Unprotect();
322#endif
323
324 // Allocates and initializes a new JavaScript object based on a
325 // constructor.
326 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
327 // failed.
328 // Please note this does not perform a garbage collection.
329 static Object* AllocateJSObject(JSFunction* constructor,
330 PretenureFlag pretenure = NOT_TENURED);
331
332 // Allocates and initializes a new global object based on a constructor.
333 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
334 // failed.
335 // Please note this does not perform a garbage collection.
336 static Object* AllocateGlobalObject(JSFunction* constructor);
337
338 // Returns a deep copy of the JavaScript object.
339 // Properties and elements are copied too.
340 // Returns failure if allocation failed.
341 static Object* CopyJSObject(JSObject* source);
342
343 // Allocates the function prototype.
344 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
345 // failed.
346 // Please note this does not perform a garbage collection.
347 static Object* AllocateFunctionPrototype(JSFunction* function);
348
349 // Reinitialize a JSGlobalProxy based on a constructor. The object
350 // must have the same size as objects allocated using the
351 // constructor. The object is reinitialized and behaves as an
352 // object that has been freshly allocated using the constructor.
353 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
354 JSGlobalProxy* global);
355
356 // Allocates and initializes a new JavaScript object based on a map.
357 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
358 // failed.
359 // Please note this does not perform a garbage collection.
360 static Object* AllocateJSObjectFromMap(Map* map,
361 PretenureFlag pretenure = NOT_TENURED);
362
363 // Allocates a heap object based on the map.
364 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
365 // failed.
366 // Please note this function does not perform a garbage collection.
367 static Object* Allocate(Map* map, AllocationSpace space);
368
369 // Allocates a JS Map in the heap.
370 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
371 // failed.
372 // Please note this function does not perform a garbage collection.
373 static Object* AllocateMap(InstanceType instance_type, int instance_size);
374
375 // Allocates a partial map for bootstrapping.
376 static Object* AllocatePartialMap(InstanceType instance_type,
377 int instance_size);
378
379 // Allocate a map for the specified function
380 static Object* AllocateInitialMap(JSFunction* fun);
381
382 // Allocates and fully initializes a String. There are two String
383 // encodings: ASCII and two byte. One should choose between the three string
384 // allocation functions based on the encoding of the string buffer used to
385 // initialize the string.
386 // - ...FromAscii initializes the string from a buffer that is ASCII
387 // encoded (it does not check that the buffer is ASCII encoded) and the
388 // result will be ASCII encoded.
389 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
390 // encoded. If the characters are all single-byte characters, the
391 // result will be ASCII encoded, otherwise it will be converted to two
392 // byte.
393 // - ...FromTwoByte initializes the string from a buffer that is two-byte
394 // encoded. If the characters are all single-byte characters, the
395 // result will be converted to ASCII, otherwise it will be left as
396 // two-byte.
397 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
398 // failed.
399 // Please note this does not perform a garbage collection.
400 static Object* AllocateStringFromAscii(
401 Vector<const char> str,
402 PretenureFlag pretenure = NOT_TENURED);
403 static Object* AllocateStringFromUtf8(
404 Vector<const char> str,
405 PretenureFlag pretenure = NOT_TENURED);
406 static Object* AllocateStringFromTwoByte(
407 Vector<const uc16> str,
408 PretenureFlag pretenure = NOT_TENURED);
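  // Usage sketch (illustrative only; the failure check mirrors how allocation
  // results are handled elsewhere in V8):
  //
  //   Object* result = Heap::AllocateStringFromAscii(CStrVector("hello"));
  //   if (result->IsFailure()) return result;  // propagate retry-after-GC
  //   String* str = String::cast(result);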
409
410 // Allocates a symbol in old space based on the character stream.
411 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
412 // failed.
413 // Please note this function does not perform a garbage collection.
414 static inline Object* AllocateSymbol(Vector<const char> str,
415 int chars,
416 uint32_t length_field);
417
418 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
419 int chars,
420 uint32_t length_field);
421
422 static Object* AllocateExternalSymbol(Vector<const char> str,
423 int chars);
424
425
426 // Allocates and partially initializes a String. There are two String
427 // encodings: ASCII and two byte. These functions allocate a string of the
428 // given length and set its map and length fields. The characters of the
429 // string are uninitialized.
430 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
431 // failed.
432 // Please note this does not perform a garbage collection.
433 static Object* AllocateRawAsciiString(
434 int length,
435 PretenureFlag pretenure = NOT_TENURED);
436 static Object* AllocateRawTwoByteString(
437 int length,
438 PretenureFlag pretenure = NOT_TENURED);
439
440 // Computes a single character string where the character has the given code.
441 // A cache is used for ascii codes.
442 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
443 // failed. Please note this does not perform a garbage collection.
444 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
445
446 // Allocate a byte array of the specified length
447 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
448 // failed.
449 // Please note this does not perform a garbage collection.
450 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
451
452 // Allocate a non-tenured byte array of the specified length
453 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
454 // failed.
455 // Please note this does not perform a garbage collection.
456 static Object* AllocateByteArray(int length);
457
458 // Allocate a pixel array of the specified length
459 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
460 // failed.
461 // Please note this does not perform a garbage collection.
462 static Object* AllocatePixelArray(int length,
463 uint8_t* external_pointer,
464 PretenureFlag pretenure);
465
466 // Allocates an external array of the specified length and type.
467 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
468 // failed.
469 // Please note this does not perform a garbage collection.
470 static Object* AllocateExternalArray(int length,
471 ExternalArrayType array_type,
472 void* external_pointer,
473 PretenureFlag pretenure);
474
475 // Allocate a tenured JS global property cell.
476 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
477 // failed.
478 // Please note this does not perform a garbage collection.
479 static Object* AllocateJSGlobalPropertyCell(Object* value);
480
481 // Allocates a fixed array initialized with undefined values
482 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
483 // failed.
484 // Please note this does not perform a garbage collection.
485 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
486 // Allocate uninitialized, non-tenured fixed array with length elements.
487 static Object* AllocateFixedArray(int length);
488
489 // Make a copy of src and return it. Returns
490 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
491 static Object* CopyFixedArray(FixedArray* src);
492
493 // Allocates a fixed array initialized with the hole values.
494 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
495 // failed.
496 // Please note this does not perform a garbage collection.
497 static Object* AllocateFixedArrayWithHoles(int length);
498
499 // AllocateHashTable is identical to AllocateFixedArray except
500 // that the resulting object has hash_table_map as map.
501 static Object* AllocateHashTable(int length);
502
503 // Allocate a global (but otherwise uninitialized) context.
504 static Object* AllocateGlobalContext();
505
506 // Allocate a function context.
507 static Object* AllocateFunctionContext(int length, JSFunction* closure);
508
509 // Allocate a 'with' context.
510 static Object* AllocateWithContext(Context* previous,
511 JSObject* extension,
512 bool is_catch_context);
513
514 // Allocates a new utility object in the old generation.
515 static Object* AllocateStruct(InstanceType type);
516
517 // Allocates a function initialized with a shared part.
518 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
519 // failed.
520 // Please note this does not perform a garbage collection.
521 static Object* AllocateFunction(Map* function_map,
522 SharedFunctionInfo* shared,
523 Object* prototype);
524
525 // Indices for direct access into argument objects.
526 static const int arguments_callee_index = 0;
527 static const int arguments_length_index = 1;
528
529 // Allocates an arguments object - optionally with an elements array.
530 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
531 // failed.
532 // Please note this does not perform a garbage collection.
533 static Object* AllocateArgumentsObject(Object* callee, int length);
534
535 // Converts a double into either a Smi or a HeapNumber object.
536 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
537 // failed.
538 // Please note this does not perform a garbage collection.
539 static Object* NewNumberFromDouble(double value,
540 PretenureFlag pretenure = NOT_TENURED);
541
542 // Same as NewNumberFromDouble, but may return a preallocated/immutable
543 // number object (e.g., minus_zero_value_, nan_value_)
544 static Object* NumberFromDouble(double value,
545 PretenureFlag pretenure = NOT_TENURED);
546
547 // Allocates a HeapNumber from value.
548 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
549 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
550
551 // Converts an int into either a Smi or a HeapNumber object.
552 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
553 // failed.
554 // Please note this does not perform a garbage collection.
555 static inline Object* NumberFromInt32(int32_t value);
556
557 // Converts an unsigned int into either a Smi or a HeapNumber object.
558 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
559 // failed.
560 // Please note this does not perform a garbage collection.
561 static inline Object* NumberFromUint32(uint32_t value);
562
563 // Allocates a new proxy object.
564 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
565 // failed.
566 // Please note this does not perform a garbage collection.
567 static Object* AllocateProxy(Address proxy,
568 PretenureFlag pretenure = NOT_TENURED);
569
570 // Allocates a new SharedFunctionInfo object.
571 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
572 // failed.
573 // Please note this does not perform a garbage collection.
574 static Object* AllocateSharedFunctionInfo(Object* name);
575
576 // Allocates a new cons string object.
577 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
578 // failed.
579 // Please note this does not perform a garbage collection.
580 static Object* AllocateConsString(String* first, String* second);
581
582 // Allocates a new sliced string object which is a slice of an underlying
583 // string buffer stretching from the index start (inclusive) to the index
584 // end (exclusive).
585 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
586 // failed.
587 // Please note this does not perform a garbage collection.
588 static Object* AllocateSlicedString(String* buffer,
589 int start,
590 int end);
591
592 // Allocates a new sub string object which is a substring of an underlying
593 // string buffer stretching from the index start (inclusive) to the index
594 // end (exclusive).
595 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
596 // failed.
597 // Please note this does not perform a garbage collection.
598 static Object* AllocateSubString(String* buffer,
599 int start,
600 int end);
601
602 // Allocate a new external string object, which is backed by a string
603 // resource that resides outside the V8 heap.
604 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
605 // failed.
606 // Please note this does not perform a garbage collection.
607 static Object* AllocateExternalStringFromAscii(
608 ExternalAsciiString::Resource* resource);
609 static Object* AllocateExternalStringFromTwoByte(
610 ExternalTwoByteString::Resource* resource);
611
612 // Allocates an uninitialized object. The memory is non-executable if the
613 // hardware and OS allow.
614 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
615 // failed.
616 // Please note this function does not perform a garbage collection.
617 static inline Object* AllocateRaw(int size_in_bytes,
618 AllocationSpace space,
619 AllocationSpace retry_space);
620
621 // Initialize a filler object to keep the ability to iterate over the heap
622 // when shortening objects.
623 static void CreateFillerObjectAt(Address addr, int size);
624
625 // Makes a new native code object
626 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
627 // failed. On success, the pointer to the Code object is stored in the
628 // self_reference. This allows generated code to reference its own Code
629 // object by containing this pointer.
630 // Please note this function does not perform a garbage collection.
631 static Object* CreateCode(const CodeDesc& desc,
632 ZoneScopeInfo* sinfo,
633 Code::Flags flags,
634 Handle<Object> self_reference);
635
636 static Object* CopyCode(Code* code);
637 // Finds the symbol for string in the symbol table.
638 // If not found, a new symbol is added to the table and returned.
639 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
640 // failed.
641 // Please note this function does not perform a garbage collection.
642 static Object* LookupSymbol(Vector<const char> str);
643 static Object* LookupAsciiSymbol(const char* str) {
644 return LookupSymbol(CStrVector(str));
645 }
646 static Object* LookupSymbol(String* str);
647 static bool LookupSymbolIfExists(String* str, String** symbol);
648
649 // Compute the matching symbol map for a string if possible.
650 // NULL is returned if the string is in new space or not flattened.
651 static Map* SymbolMapForString(String* str);
652
653 // Converts the given boolean condition to JavaScript boolean value.
654 static Object* ToBoolean(bool condition) {
655 return condition ? true_value() : false_value();
656 }
657
658 // Code that should be run before and after each GC. Includes some
659 // reporting/verification activities when compiled with DEBUG set.
660 static void GarbageCollectionPrologue();
661 static void GarbageCollectionEpilogue();
662
663 // Performs a garbage collection operation.
664 // Returns whether required_space bytes are available after the collection.
665 static bool CollectGarbage(int required_space, AllocationSpace space);
666
667 // Performs a full garbage collection. Force compaction if the
668 // parameter is true.
669 static void CollectAllGarbage(bool force_compaction);
670
671 // Performs a full garbage collection if a context has been disposed
672 // since the last time the check was performed.
673 static void CollectAllGarbageIfContextDisposed();
674
675 // Notify the heap that a context has been disposed.
676 static void NotifyContextDisposed();
677
678 // Utility to invoke the scavenger. This is needed in test code to
679 // ensure correct callback for weak global handles.
680 static void PerformScavenge();
681
682#ifdef DEBUG
683 // Utility used with flag gc-greedy.
684 static bool GarbageCollectionGreedyCheck();
685#endif
686
687 static void SetGlobalGCPrologueCallback(GCCallback callback) {
688 global_gc_prologue_callback_ = callback;
689 }
690 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
691 global_gc_epilogue_callback_ = callback;
692 }
693
694 // Heap root getters. We have versions with and without type::cast() here.
695 // You can't use type::cast during GC because the assert fails.
696#define ROOT_ACCESSOR(type, name, camel_name) \
697 static inline type* name() { \
698 return type::cast(roots_[k##camel_name##RootIndex]); \
699 } \
700 static inline type* raw_unchecked_##name() { \
701 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
702 }
703 ROOT_LIST(ROOT_ACCESSOR)
704#undef ROOT_ACCESSOR
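  // For illustration, the ROOT_ACCESSOR expansion of the heap_number_map entry
  // from STRONG_ROOT_LIST above is roughly:
  //
  //   static inline Map* heap_number_map() {
  //     return Map::cast(roots_[kHeapNumberMapRootIndex]);
  //   }
  //   static inline Map* raw_unchecked_heap_number_map() {
  //     return reinterpret_cast<Map*>(roots_[kHeapNumberMapRootIndex]);
  //   }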
705
706// Utility type maps
707#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
708 static inline Map* name##_map() { \
709 return Map::cast(roots_[k##Name##MapRootIndex]); \
710 }
711 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
712#undef STRUCT_MAP_ACCESSOR
713
714#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
715 return String::cast(roots_[k##name##RootIndex]); \
716 }
717 SYMBOL_LIST(SYMBOL_ACCESSOR)
718#undef SYMBOL_ACCESSOR
719
720 // The hidden_symbol is special because it is the empty string, but does
721 // not match the empty string.
722 static String* hidden_symbol() { return hidden_symbol_; }
723
724 // Iterates over all roots in the heap.
725 static void IterateRoots(ObjectVisitor* v);
726 // Iterates over all strong roots in the heap.
727 static void IterateStrongRoots(ObjectVisitor* v);
728
729 // Iterates remembered set of an old space.
730 static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
731
732 // Iterates a range of remembered set addresses starting with rset_start
733 // corresponding to the range of allocated pointers
734 // [object_start, object_end).
735 // Returns the number of bits that were set.
736 static int IterateRSetRange(Address object_start,
737 Address object_end,
738 Address rset_start,
739 ObjectSlotCallback copy_object_func);
740
741 // Returns whether the object resides in new space.
742 static inline bool InNewSpace(Object* object);
743 static inline bool InFromSpace(Object* object);
744 static inline bool InToSpace(Object* object);
745
746 // Checks whether an address/object is in the heap (including auxiliary
747 // area and unused area).
748 static bool Contains(Address addr);
749 static bool Contains(HeapObject* value);
750
751 // Checks whether an address/object is in a space.
752 // Currently used by tests and heap verification only.
753 static bool InSpace(Address addr, AllocationSpace space);
754 static bool InSpace(HeapObject* value, AllocationSpace space);
755
756 // Finds out which space an object should get promoted to based on its type.
757 static inline OldSpace* TargetSpace(HeapObject* object);
758 static inline AllocationSpace TargetSpaceId(InstanceType type);
759
760 // Sets the stub_cache_ (only used when expanding the dictionary).
761 static void public_set_code_stubs(NumberDictionary* value) {
762 roots_[kCodeStubsRootIndex] = value;
763 }
764
765 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
766 static void public_set_non_monomorphic_cache(NumberDictionary* value) {
767 roots_[kNonMonomorphicCacheRootIndex] = value;
768 }
769
770 // Update the next script id.
771 static inline void SetLastScriptId(Object* last_script_id);
772
773 // Generated code can embed this address to get access to the roots.
774 static Object** roots_address() { return roots_; }
775
776#ifdef DEBUG
777 static void Print();
778 static void PrintHandles();
779
780 // Verify the heap is in its normal state before or after a GC.
781 static void Verify();
782
783 // Report heap statistics.
784 static void ReportHeapStatistics(const char* title);
785 static void ReportCodeStatistics(const char* title);
786
787 // Fill in bogus values in from space
788 static void ZapFromSpace();
789#endif
790
791#if defined(ENABLE_LOGGING_AND_PROFILING)
792 // Print short heap statistics.
793 static void PrintShortHeapStatistics();
794#endif
795
796 // Makes a new symbol object
797 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
798 // failed.
799 // Please note this function does not perform a garbage collection.
800 static Object* CreateSymbol(const char* str, int length, int hash);
801 static Object* CreateSymbol(String* str);
802
803 // Write barrier support for address[offset] = o.
804 static inline void RecordWrite(Address address, int offset);
805
806 // Given an address occupied by a live code object, return that object.
807 static Object* FindCodeObject(Address a);
808
809 // Invoke Shrink on shrinkable spaces.
810 static void Shrink();
811
812 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
813 static inline HeapState gc_state() { return gc_state_; }
814
815#ifdef DEBUG
816 static bool IsAllocationAllowed() { return allocation_allowed_; }
817 static inline bool allow_allocation(bool enable);
818
819 static bool disallow_allocation_failure() {
820 return disallow_allocation_failure_;
821 }
822
823 static void TracePathToObject();
824 static void TracePathToGlobal();
825#endif
826
827 // Callback function passed to Heap::Iterate etc. Copies an object if
828 // necessary; the object might be promoted to an old space. The caller must
829 // ensure the precondition that the object is (a) a heap object and (b) in
830 // the heap's from space.
831 static void ScavengePointer(HeapObject** p);
832 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
833
834 // Clear a range of remembered set addresses corresponding to the object
835 // area address 'start' with size 'size_in_bytes', e.g., when adding blocks
836 // to the free list.
837 static void ClearRSetRange(Address start, int size_in_bytes);
838
839 // Rebuild remembered set in old and map spaces.
840 static void RebuildRSets();
841
842 // Commits from space if it is uncommitted.
843 static void EnsureFromSpaceIsCommitted();
844
845 //
846 // Support for the API.
847 //
848
849 static bool CreateApiObjects();
850
851 // Attempt to find the number in a small cache. If we find it, return
852 // the string representation of the number. Otherwise return undefined.
853 static Object* GetNumberStringCache(Object* number);
854
855 // Update the cache with a new number-string pair.
856 static void SetNumberStringCache(Object* number, String* str);
857
858 // Entries in the cache. Must be a power of 2.
859 static const int kNumberStringCacheSize = 64;
860
861 // Adjusts the amount of registered external memory.
862 // Returns the adjusted value.
863 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
864
865 // Allocate uninitialized fixed array (pretenure == NOT_TENURED).
866 static Object* AllocateRawFixedArray(int length);
867
868 // True if we have reached the allocation limit in the old generation that
869 // should force the next GC (caused normally) to be a full one.
870 static bool OldGenerationPromotionLimitReached() {
871 return (PromotedSpaceSize() + PromotedExternalMemorySize())
872 > old_gen_promotion_limit_;
873 }
874
875 // True if we have reached the allocation limit in the old generation that
876 // should artificially cause a GC right now.
877 static bool OldGenerationAllocationLimitReached() {
878 return (PromotedSpaceSize() + PromotedExternalMemorySize())
879 > old_gen_allocation_limit_;
880 }
881
882 // Can be called when the embedding application is idle.
883 static bool IdleNotification();
884
885 // Declare all the root indices.
886 enum RootListIndex {
887#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
888 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
889#undef ROOT_INDEX_DECLARATION
890
891// Utility type maps
892#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
893 STRUCT_LIST(DECLARE_STRUCT_MAP)
894#undef DECLARE_STRUCT_MAP
895
896#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
897 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
898#undef SYMBOL_INDEX_DECLARATION
899
900 kSymbolTableRootIndex,
901 kStrongRootListLength = kSymbolTableRootIndex,
902 kRootListLength
903 };
904
905 static Object* NumberToString(Object* number);
906
907 static Map* MapForExternalArrayType(ExternalArrayType array_type);
908 static RootListIndex RootIndexForExternalArrayType(
909 ExternalArrayType array_type);
910
911 private:
912 static int reserved_semispace_size_;
913 static int max_semispace_size_;
914 static int initial_semispace_size_;
915 static int max_old_generation_size_;
916 static size_t code_range_size_;
917
918 // For keeping track of how much data has survived
919 // scavenge since last new space expansion.
920 static int survived_since_last_expansion_;
921
922 static int always_allocate_scope_depth_;
923 static bool context_disposed_pending_;
924
925 static const int kMaxMapSpaceSize = 8*MB;
926
927#if defined(V8_TARGET_ARCH_X64)
928 static const int kMaxObjectSizeInNewSpace = 512*KB;
929#else
930 static const int kMaxObjectSizeInNewSpace = 256*KB;
931#endif
932
933 static NewSpace new_space_;
934 static OldSpace* old_pointer_space_;
935 static OldSpace* old_data_space_;
936 static OldSpace* code_space_;
937 static MapSpace* map_space_;
938 static CellSpace* cell_space_;
939 static LargeObjectSpace* lo_space_;
940 static HeapState gc_state_;
941
942 // Returns the size of objects residing in non-new spaces.
943 static int PromotedSpaceSize();
944
945 // Returns the amount of external memory registered since last global gc.
946 static int PromotedExternalMemorySize();
947
948 static int mc_count_; // how many mark-compact collections happened
949 static int gc_count_; // how many gc happened
950
951#define ROOT_ACCESSOR(type, name, camel_name) \
952 static inline void set_##name(type* value) { \
953 roots_[k##camel_name##RootIndex] = value; \
954 }
955 ROOT_LIST(ROOT_ACCESSOR)
956#undef ROOT_ACCESSOR
957
958#ifdef DEBUG
959 static bool allocation_allowed_;
960
961 // If the --gc-interval flag is set to a positive value, this
962 // variable holds the value indicating the number of allocations
963 // remaining until the next failure and garbage collection.
964 static int allocation_timeout_;
965
966 // Do we expect to be able to handle allocation failure at this
967 // time?
968 static bool disallow_allocation_failure_;
969#endif // DEBUG
970
971 // Limit that triggers a global GC on the next (normally caused) GC. This
972 // is checked when we have already decided to do a GC to help determine
973 // which collector to invoke.
974 static int old_gen_promotion_limit_;
975
976 // Limit that triggers a global GC as soon as is reasonable. This is
977 // checked before expanding a paged space in the old generation and on
978 // every allocation in large object space.
979 static int old_gen_allocation_limit_;
980
981 // Limit on the amount of externally allocated memory allowed
982 // between global GCs. If reached a global GC is forced.
983 static int external_allocation_limit_;
984
985 // The amount of external memory registered through the API kept alive
986 // by global handles
987 static int amount_of_external_allocated_memory_;
988
989 // Caches the amount of external memory registered at the last global gc.
990 static int amount_of_external_allocated_memory_at_last_global_gc_;
991
992 // Indicates that an allocation has failed in the old generation since the
993 // last GC.
994 static int old_gen_exhausted_;
995
996 static Object* roots_[kRootListLength];
997
998 struct StringTypeTable {
999 InstanceType type;
1000 int size;
1001 RootListIndex index;
1002 };
1003
1004 struct ConstantSymbolTable {
1005 const char* contents;
1006 RootListIndex index;
1007 };
1008
1009 struct StructTable {
1010 InstanceType type;
1011 int size;
1012 RootListIndex index;
1013 };
1014
1015 static const StringTypeTable string_type_table[];
1016 static const ConstantSymbolTable constant_symbol_table[];
1017 static const StructTable struct_table[];
1018
1019 // The special hidden symbol which is an empty string, but does not match
1020 // any string when looked up in properties.
1021 static String* hidden_symbol_;
1022
1023 // GC callback function, called before and after mark-compact GC.
1024 // Allocations in the callback function are disallowed.
1025 static GCCallback global_gc_prologue_callback_;
1026 static GCCallback global_gc_epilogue_callback_;
1027
1028 // Checks whether a global GC is necessary
1029 static GarbageCollector SelectGarbageCollector(AllocationSpace space);
1030
1031 // Performs garbage collection
1032 static void PerformGarbageCollection(AllocationSpace space,
1033 GarbageCollector collector,
1034 GCTracer* tracer);
1035
1036 // Returns either a Smi or a Number object from 'value'. If 'new_object'
1037 // is false, it may return a preallocated immutable object.
1038 static Object* SmiOrNumberFromDouble(double value,
1039 bool new_object,
1040 PretenureFlag pretenure = NOT_TENURED);
1041
1042 // Allocate an uninitialized object in map space. The behavior is identical
1043 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1044 // have to test the allocation space argument and (b) can reduce code size
1045 // (since both AllocateRaw and AllocateRawMap are inlined).
1046 static inline Object* AllocateRawMap();
1047
1048 // Allocate an uninitialized object in the global property cell space.
1049 static inline Object* AllocateRawCell();
1050
1051 // Initializes a JSObject based on its map.
1052 static void InitializeJSObjectFromMap(JSObject* obj,
1053 FixedArray* properties,
1054 Map* map);
1055
1056 static bool CreateInitialMaps();
1057 static bool CreateInitialObjects();
1058
1059 // These Create*EntryStub functions are here because of a gcc-4.4 bug
1060 // that assigns wrong vtable entries.
1061 static void CreateCEntryStub();
1062 static void CreateCEntryDebugBreakStub();
1063 static void CreateJSEntryStub();
1064 static void CreateJSConstructEntryStub();
1065 static void CreateRegExpCEntryStub();
1066
1067 static void CreateFixedStubs();
1068
1069 static Object* CreateOddball(Map* map,
1070 const char* to_string,
1071 Object* to_number);
1072
1073 // Allocate empty fixed array.
1074 static Object* AllocateEmptyFixedArray();
1075
1076 // Performs a minor collection in new generation.
1077 static void Scavenge();
1078
1079 // Performs a major collection in the whole heap.
1080 static void MarkCompact(GCTracer* tracer);
1081
1082 // Code to be run before and after mark-compact.
1083 static void MarkCompactPrologue(bool is_compacting);
1084 static void MarkCompactEpilogue(bool is_compacting);
1085
1086 // Helper function used by CopyObject to copy a source object to an
1087 // allocated target object and update the forwarding pointer in the source
1088 // object. Returns the target object.
1089 static HeapObject* MigrateObject(HeapObject* source,
1090 HeapObject* target,
1091 int size);
1092
1093 // Helper function that governs the promotion policy from new space to
1094 // old. If the object's old address lies below the new space's age
1095 // mark or if we've already filled the bottom 1/16th of the to space,
1096 // we try to promote this object.
1097 static inline bool ShouldBePromoted(Address old_address, int object_size);
1098#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1099 // Record the copy of an object in the NewSpace's statistics.
1100 static void RecordCopiedObject(HeapObject* obj);
1101
1102 // Record statistics before and after garbage collection.
1103 static void ReportStatisticsBeforeGC();
1104 static void ReportStatisticsAfterGC();
1105#endif
1106
1107 // Update an old object's remembered set
1108 static int UpdateRSet(HeapObject* obj);
1109
1110 // Rebuild remembered set in an old space.
1111 static void RebuildRSets(PagedSpace* space);
1112
1113 // Rebuild remembered set in the large object space.
1114 static void RebuildRSets(LargeObjectSpace* space);
1115
1116 // Slow part of scavenge object.
1117 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1118
1119 // Copy memory from src to dst.
1120 static inline void CopyBlock(Object** dst, Object** src, int byte_size);
1121
1122 // Initializes a function with a shared part and prototype.
1123 // Returns the function.
1124 // Note: this code was factored out of AllocateFunction such that
1125 // other parts of the VM could use it. Specifically, a function that creates
1126 // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
1127 // Please note this does not perform a garbage collection.
1128 static inline Object* InitializeFunction(JSFunction* function,
1129 SharedFunctionInfo* shared,
1130 Object* prototype);
1131
1132 static const int kInitialSymbolTableSize = 2048;
1133 static const int kInitialEvalCacheSize = 64;
1134
1135 friend class Factory;
1136 friend class DisallowAllocationFailure;
1137 friend class AlwaysAllocateScope;
1138};
1139
1140
1141class AlwaysAllocateScope {
1142 public:
1143 AlwaysAllocateScope() {
1144 // We shouldn't hit any nested scopes, because that requires
1145 // non-handle code to call handle code. The code still works but
1146 // performance will degrade, so we want to catch this situation
1147 // in debug mode.
1148 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1149 Heap::always_allocate_scope_depth_++;
1150 }
1151
1152 ~AlwaysAllocateScope() {
1153 Heap::always_allocate_scope_depth_--;
1154 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1155 }
1156};
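// Usage sketch (illustrative): while at least one AlwaysAllocateScope object
// is live, Heap::always_allocate() above returns true.
//
//   { AlwaysAllocateScope always_allocate;
//     // code that relies on Heap::always_allocate() being true
//   }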
1157
1158
1159#ifdef DEBUG
1160// Visitor class to verify interior pointers that do not have remembered set
1161// bits. All heap object pointers have to point into the heap to a location
1162// that has a map pointer at its first word. Caveat: Heap::Contains is an
1163// approximation because it can return true for objects in a heap space but
1164// above the allocation pointer.
1165class VerifyPointersVisitor: public ObjectVisitor {
1166 public:
1167 void VisitPointers(Object** start, Object** end) {
1168 for (Object** current = start; current < end; current++) {
1169 if ((*current)->IsHeapObject()) {
1170 HeapObject* object = HeapObject::cast(*current);
1171 ASSERT(Heap::Contains(object));
1172 ASSERT(object->map()->IsMap());
1173 }
1174 }
1175 }
1176};
1177
1178
1179// Visitor class to verify interior pointers that have remembered set bits.
1180// As VerifyPointersVisitor but also checks that remembered set bits are
1181// always set for pointers into new space.
1182class VerifyPointersAndRSetVisitor: public ObjectVisitor {
1183 public:
1184 void VisitPointers(Object** start, Object** end) {
1185 for (Object** current = start; current < end; current++) {
1186 if ((*current)->IsHeapObject()) {
1187 HeapObject* object = HeapObject::cast(*current);
1188 ASSERT(Heap::Contains(object));
1189 ASSERT(object->map()->IsMap());
1190 if (Heap::InNewSpace(object)) {
1191 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
1192 }
1193 }
1194 }
1195 }
1196};
1197#endif
1198
1199
1200// Space iterator for iterating over all spaces of the heap.
1201// Returns each space in turn, and null when it is done.
1202class AllSpaces BASE_EMBEDDED {
1203 public:
1204 Space* next();
1205 AllSpaces() { counter_ = FIRST_SPACE; }
1206 private:
1207 int counter_;
1208};
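// Usage sketch (illustrative), following the "null when done" contract above:
//
//   AllSpaces spaces;
//   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
//     // inspect space
//   }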
1209
1210
1211// Space iterator for iterating over all old spaces of the heap: Old pointer
1212// space, old data space and code space.
1213// Returns each space in turn, and null when it is done.
1214class OldSpaces BASE_EMBEDDED {
1215 public:
1216 OldSpace* next();
1217 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1218 private:
1219 int counter_;
1220};
1221
1222
1223// Space iterator for iterating over all the paged spaces of the heap:
1224// Map space, old pointer space, old data space and code space.
1225// Returns each space in turn, and null when it is done.
1226class PagedSpaces BASE_EMBEDDED {
1227 public:
1228 PagedSpace* next();
1229 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1230 private:
1231 int counter_;
1232};
1233
1234
1235// Space iterator for iterating over all spaces of the heap.
1236// For each space an object iterator is provided. The deallocation of the
1237// returned object iterators is handled by the space iterator.
1238class SpaceIterator : public Malloced {
1239 public:
1240 SpaceIterator();
1241 virtual ~SpaceIterator();
1242
1243 bool has_next();
1244 ObjectIterator* next();
1245
1246 private:
1247 ObjectIterator* CreateIterator();
1248
1249 int current_space_; // from enum AllocationSpace.
1250 ObjectIterator* iterator_; // object iterator for the current space.
1251};
1252
1253
1254// A HeapIterator provides iteration over the whole heap. It aggregates the
1255// specific iterators for the different spaces, as these can each iterate over
1256// only one space.
1257
1258class HeapIterator BASE_EMBEDDED {
1259 public:
1260 explicit HeapIterator();
1261 virtual ~HeapIterator();
1262
1263 bool has_next();
1264 HeapObject* next();
1265 void reset();
1266
1267 private:
1268 // Perform the initialization.
1269 void Init();
1270
1271 // Perform all necessary shutdown (destruction) work.
1272 void Shutdown();
1273
1274 // Space iterator for iterating all the spaces.
1275 SpaceIterator* space_iterator_;
1276 // Object iterator for the space currently being iterated.
1277 ObjectIterator* object_iterator_;
1278};
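// Usage sketch (illustrative):
//
//   HeapIterator iterator;
//   while (iterator.has_next()) {
//     HeapObject* object = iterator.next();
//     // inspect object
//   }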
1279
1280
1281// Cache for mapping (map, property name) into field offset.
1282// Cleared at startup and prior to mark sweep collection.
1283class KeyedLookupCache {
1284 public:
1285 // Lookup field offset for (map, name). If absent, -1 is returned.
1286 static int Lookup(Map* map, String* name);
1287
1288 // Update an element in the cache.
1289 static void Update(Map* map, String* name, int field_offset);
1290
1291 // Clear the cache.
1292 static void Clear();
1293 private:
1294 static inline int Hash(Map* map, String* name);
1295 static const int kLength = 64;
1296 struct Key {
1297 Map* map;
1298 String* name;
1299 };
1300 static Key keys_[kLength];
1301 static int field_offsets_[kLength];
1302};
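// Usage sketch (illustrative; 'map', 'name' and 'computed_offset' stand in for
// values produced by the caller's slow path):
//
//   int offset = KeyedLookupCache::Lookup(map, name);
//   if (offset == -1) {
//     // slow path: compute the field offset, then remember it
//     KeyedLookupCache::Update(map, name, computed_offset);
//   }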
1303
1304
1305
1306// Cache for mapping (array, property name) into descriptor index.
1307// The cache contains both positive and negative results.
1308// A descriptor index equal to kAbsent means the property is absent.
1309// Cleared at startup and prior to any gc.
1310class DescriptorLookupCache {
1311 public:
1312 // Lookup descriptor index for (map, name).
1313 // If absent, kAbsent is returned.
1314 static int Lookup(DescriptorArray* array, String* name) {
1315 if (!StringShape(name).IsSymbol()) return kAbsent;
1316 int index = Hash(array, name);
1317 Key& key = keys_[index];
1318 if ((key.array == array) && (key.name == name)) return results_[index];
1319 return kAbsent;
1320 }
1321
1322 // Update an element in the cache.
1323 static void Update(DescriptorArray* array, String* name, int result) {
1324 ASSERT(result != kAbsent);
1325 if (StringShape(name).IsSymbol()) {
1326 int index = Hash(array, name);
1327 Key& key = keys_[index];
1328 key.array = array;
1329 key.name = name;
1330 results_[index] = result;
1331 }
1332 }
1333
1334 // Clear the cache.
1335 static void Clear();
1336
1337 static const int kAbsent = -2;
1338 private:
1339 static int Hash(DescriptorArray* array, String* name) {
1340 // Uses only lower 32 bits if pointers are larger.
1341 uintptr_t array_hash =
1342 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1343 uintptr_t name_hash =
1344 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1345 return (array_hash ^ name_hash) % kLength;
1346 }
1347
1348 static const int kLength = 64;
1349 struct Key {
1350 DescriptorArray* array;
1351 String* name;
1352 };
1353
1354 static Key keys_[kLength];
1355 static int results_[kLength];
1356};
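// Usage sketch (illustrative; 'descriptors' and 'name' come from the caller,
// and the slow-path search is elided):
//
//   int number = DescriptorLookupCache::Lookup(descriptors, name);
//   if (number == DescriptorLookupCache::kAbsent) {
//     number = /* search the descriptor array */;
//     DescriptorLookupCache::Update(descriptors, name, number);
//   }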
1357
1358
1359// ----------------------------------------------------------------------------
1360// Marking stack for tracing live objects.
1361
1362class MarkingStack {
1363 public:
1364 void Initialize(Address low, Address high) {
1365 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1366 high_ = reinterpret_cast<HeapObject**>(high);
1367 overflowed_ = false;
1368 }
1369
1370 bool is_full() { return top_ >= high_; }
1371
1372 bool is_empty() { return top_ <= low_; }
1373
1374 bool overflowed() { return overflowed_; }
1375
1376 void clear_overflowed() { overflowed_ = false; }
1377
1378 // Push the (marked) object on the marking stack if there is room,
1379 // otherwise mark the object as overflowed and wait for a rescan of the
1380 // heap.
1381 void Push(HeapObject* object) {
1382 CHECK(object->IsHeapObject());
1383 if (is_full()) {
1384 object->SetOverflow();
1385 overflowed_ = true;
1386 } else {
1387 *(top_++) = object;
1388 }
1389 }
1390
1391 HeapObject* Pop() {
1392 ASSERT(!is_empty());
1393 HeapObject* object = *(--top_);
1394 CHECK(object->IsHeapObject());
1395 return object;
1396 }
1397
1398 private:
1399 HeapObject** low_;
1400 HeapObject** top_;
1401 HeapObject** high_;
1402 bool overflowed_;
1403};
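// Usage sketch (illustrative; 'low', 'high' and 'root' stand in for values
// owned by the marker): a typical drain loop.
//
//   MarkingStack marking_stack;
//   marking_stack.Initialize(low, high);
//   marking_stack.Push(root);
//   while (!marking_stack.is_empty()) {
//     HeapObject* object = marking_stack.Pop();
//     // visit the object's fields, pushing unmarked children
//   }
//   if (marking_stack.overflowed()) {
//     // rescan the heap for objects marked as overflowed
//   }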
1404
1405
1406// A helper class to document/test C++ scopes where we do not
1407// expect a GC. Usage:
1408//
1409// /* Allocation not allowed: we cannot handle a GC in this scope. */
1410// { AssertNoAllocation nogc;
1411// ...
1412// }
1413
1414#ifdef DEBUG
1415
1416class DisallowAllocationFailure {
1417 public:
1418 DisallowAllocationFailure() {
1419 old_state_ = Heap::disallow_allocation_failure_;
1420 Heap::disallow_allocation_failure_ = true;
1421 }
1422 ~DisallowAllocationFailure() {
1423 Heap::disallow_allocation_failure_ = old_state_;
1424 }
1425 private:
1426 bool old_state_;
1427};
1428
1429class AssertNoAllocation {
1430 public:
1431 AssertNoAllocation() {
1432 old_state_ = Heap::allow_allocation(false);
1433 }
1434
1435 ~AssertNoAllocation() {
1436 Heap::allow_allocation(old_state_);
1437 }
1438
1439 private:
1440 bool old_state_;
1441};
1442
1443class DisableAssertNoAllocation {
1444 public:
1445 DisableAssertNoAllocation() {
1446 old_state_ = Heap::allow_allocation(true);
1447 }
1448
1449 ~DisableAssertNoAllocation() {
1450 Heap::allow_allocation(old_state_);
1451 }
1452
1453 private:
1454 bool old_state_;
1455};
1456
1457#else // ndef DEBUG
1458
1459class AssertNoAllocation {
1460 public:
1461 AssertNoAllocation() { }
1462 ~AssertNoAllocation() { }
1463};
1464
1465class DisableAssertNoAllocation {
1466 public:
1467 DisableAssertNoAllocation() { }
1468 ~DisableAssertNoAllocation() { }
1469};
1470
1471#endif
1472
1473// GCTracer collects and prints ONE line after each garbage collector
1474// invocation IFF --trace_gc is used.
1475
1476class GCTracer BASE_EMBEDDED {
1477 public:
1478 GCTracer();
1479
1480 ~GCTracer();
1481
1482 // Sets the collector.
1483 void set_collector(GarbageCollector collector) { collector_ = collector; }
1484
1485 // Sets the GC count.
1486 void set_gc_count(int count) { gc_count_ = count; }
1487
1488 // Sets the full GC count.
1489 void set_full_gc_count(int count) { full_gc_count_ = count; }
1490
1491 // Sets the flag that this is a compacting full GC.
1492 void set_is_compacting() { is_compacting_ = true; }
1493
1494 // Increment and decrement the count of marked objects.
1495 void increment_marked_count() { ++marked_count_; }
1496 void decrement_marked_count() { --marked_count_; }
1497
1498 int marked_count() { return marked_count_; }
1499
1500 private:
1501 // Returns a string matching the collector.
1502 const char* CollectorString();
1503
1504 // Returns the size of objects in the heap (in MB).
1505 double SizeOfHeapObjects() {
1506 return (static_cast<double>(Heap::SizeOfObjects())) / MB;
1507 }
1508
1509 double start_time_; // Timestamp set in the constructor.
1510 double start_size_; // Size of objects in heap set in constructor.
1511 GarbageCollector collector_; // Type of collector.
1512
1513 // A count (including this one, e.g., the first collection is 1) of the
1514 // number of garbage collections.
1515 int gc_count_;
1516
1517 // A count (including this one) of the number of full garbage collections.
1518 int full_gc_count_;
1519
1520 // True if the current GC is a compacting full collection, false
1521 // otherwise.
1522 bool is_compacting_;
1523
1524 // True if the *previous* full GC was a compacting collection (will be
1525 // false if there has not been a previous full GC).
1526 bool previous_has_compacted_;
1527
1528 // On a full GC, a count of the number of marked objects. Incremented
1529 // when an object is marked and decremented when an object's mark bit is
1530 // cleared. Will be zero on a scavenge collection.
1531 int marked_count_;
1532
1533 // The count from the end of the previous full GC. Will be zero if there
1534 // was no previous full GC.
1535 int previous_marked_count_;
1536};
1537
1538
1539class TranscendentalCache {
1540 public:
1541 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
1542
1543 explicit TranscendentalCache(Type t);
1544
1545 // Returns a heap number with f(input), where f is a math function specified
1546 // by the 'type' argument.
1547 static inline Object* Get(Type type, double input) {
1548 TranscendentalCache* cache = caches_[type];
1549 if (cache == NULL) {
1550 caches_[type] = cache = new TranscendentalCache(type);
1551 }
1552 return cache->Get(input);
1553 }
1554
1555 // The cache contains raw Object pointers. This method disposes of
1556 // them before a garbage collection.
1557 static void Clear();
1558
1559 private:
1560 inline Object* Get(double input) {
1561 Converter c;
1562 c.dbl = input;
1563 int hash = Hash(c);
1564 Element e = elements_[hash];
1565 if (e.in[0] == c.integers[0] &&
1566 e.in[1] == c.integers[1]) {
1567 ASSERT(e.output != NULL);
1568 return e.output;
1569 }
1570 double answer = Calculate(input);
1571 Object* heap_number = Heap::AllocateHeapNumber(answer);
1572 if (!heap_number->IsFailure()) {
1573 elements_[hash].in[0] = c.integers[0];
1574 elements_[hash].in[1] = c.integers[1];
1575 elements_[hash].output = heap_number;
1576 }
1577 return heap_number;
1578 }
1579
1580 inline double Calculate(double input) {
1581 switch (type_) {
1582 case ACOS:
1583 return acos(input);
1584 case ASIN:
1585 return asin(input);
1586 case ATAN:
1587 return atan(input);
1588 case COS:
1589 return cos(input);
1590 case EXP:
1591 return exp(input);
1592 case LOG:
1593 return log(input);
1594 case SIN:
1595 return sin(input);
1596 case TAN:
1597 return tan(input);
1598 default:
1599 return 0.0; // Never happens.
1600 }
1601 }
1602 static const int kCacheSize = 512;
1603 struct Element {
1604 uint32_t in[2];
1605 Object* output;
1606 };
1607 union Converter {
1608 double dbl;
1609 uint32_t integers[2];
1610 };
1611 inline static int Hash(const Converter& c) {
1612 uint32_t hash = (c.integers[0] ^ c.integers[1]);
1613 hash ^= hash >> 16;
1614 hash ^= hash >> 8;
1615 return (hash & (kCacheSize - 1));
1616 }
1617 static TranscendentalCache* caches_[kNumberOfCaches];
1618 Element elements_[kCacheSize];
1619 Type type_;
1620};
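// Usage sketch (illustrative): the result is a heap number, or a Failure if
// allocating the result fails.
//
//   Object* result = TranscendentalCache::Get(TranscendentalCache::SIN, 0.5);
//   if (result->IsFailure()) return result;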
1621
1622
1623} } // namespace v8::internal
1624
1625#endif // V8_HEAP_H_