// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <math.h>

#include "zone-inl.h"
34
35
36namespace v8 {
37namespace internal {
38
39// Defines all the roots in Heap.
40#define UNCONDITIONAL_STRONG_ROOT_LIST(V) \
Steve Blockd0582a62009-12-15 09:54:21 +000041 /* Put the byte array map early. We need it to be in place by the time */ \
42 /* the deserializer hits the next page, since it wants to put a byte */ \
43 /* array in the unused space at the end of the page. */ \
44 V(Map, byte_array_map, ByteArrayMap) \
45 V(Map, one_pointer_filler_map, OnePointerFillerMap) \
46 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
47 /* Cluster the most popular ones in a few cache lines here at the top. */ \
Steve Blocka7e24c12009-10-30 11:49:00 +000048 V(Smi, stack_limit, StackLimit) \
49 V(Object, undefined_value, UndefinedValue) \
50 V(Object, the_hole_value, TheHoleValue) \
51 V(Object, null_value, NullValue) \
52 V(Object, true_value, TrueValue) \
53 V(Object, false_value, FalseValue) \
54 V(Map, heap_number_map, HeapNumberMap) \
55 V(Map, global_context_map, GlobalContextMap) \
56 V(Map, fixed_array_map, FixedArrayMap) \
57 V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
58 V(Map, meta_map, MetaMap) \
59 V(Object, termination_exception, TerminationException) \
60 V(Map, hash_table_map, HashTableMap) \
61 V(FixedArray, empty_fixed_array, EmptyFixedArray) \
Steve Blockd0582a62009-12-15 09:54:21 +000062 V(Map, string_map, StringMap) \
63 V(Map, ascii_string_map, AsciiStringMap) \
64 V(Map, symbol_map, SymbolMap) \
65 V(Map, ascii_symbol_map, AsciiSymbolMap) \
66 V(Map, cons_symbol_map, ConsSymbolMap) \
67 V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
68 V(Map, external_symbol_map, ExternalSymbolMap) \
69 V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \
70 V(Map, cons_string_map, ConsStringMap) \
71 V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
72 V(Map, external_string_map, ExternalStringMap) \
73 V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
74 V(Map, undetectable_string_map, UndetectableStringMap) \
75 V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
Steve Blocka7e24c12009-10-30 11:49:00 +000076 V(Map, pixel_array_map, PixelArrayMap) \
Steve Block3ce2e202009-11-05 08:53:23 +000077 V(Map, external_byte_array_map, ExternalByteArrayMap) \
78 V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
79 V(Map, external_short_array_map, ExternalShortArrayMap) \
80 V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
81 V(Map, external_int_array_map, ExternalIntArrayMap) \
82 V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
83 V(Map, external_float_array_map, ExternalFloatArrayMap) \
Steve Blocka7e24c12009-10-30 11:49:00 +000084 V(Map, context_map, ContextMap) \
85 V(Map, catch_context_map, CatchContextMap) \
86 V(Map, code_map, CodeMap) \
87 V(Map, oddball_map, OddballMap) \
88 V(Map, global_property_cell_map, GlobalPropertyCellMap) \
89 V(Map, boilerplate_function_map, BoilerplateFunctionMap) \
90 V(Map, shared_function_info_map, SharedFunctionInfoMap) \
91 V(Map, proxy_map, ProxyMap) \
Steve Blocka7e24c12009-10-30 11:49:00 +000092 V(Object, nan_value, NanValue) \
93 V(Object, minus_zero_value, MinusZeroValue) \
94 V(String, empty_string, EmptyString) \
95 V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
96 V(Map, neander_map, NeanderMap) \
97 V(JSObject, message_listeners, MessageListeners) \
98 V(Proxy, prototype_accessors, PrototypeAccessors) \
99 V(NumberDictionary, code_stubs, CodeStubs) \
100 V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
101 V(Code, js_entry_code, JsEntryCode) \
102 V(Code, js_construct_entry_code, JsConstructEntryCode) \
103 V(Code, c_entry_code, CEntryCode) \
Leon Clarke4515c472010-02-03 11:58:03 +0000104 V(Code, debugger_statement_code, DebuggerStatementCode) \
Steve Blocka7e24c12009-10-30 11:49:00 +0000105 V(FixedArray, number_string_cache, NumberStringCache) \
106 V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
107 V(FixedArray, natives_source_cache, NativesSourceCache) \
108 V(Object, last_script_id, LastScriptId) \
Andrei Popescu31002712010-02-23 13:46:05 +0000109 V(Script, empty_script, EmptyScript) \
Steve Blockd0582a62009-12-15 09:54:21 +0000110 V(Smi, real_stack_limit, RealStackLimit) \
Steve Blocka7e24c12009-10-30 11:49:00 +0000111
// On ARM with the native regexp implementation, the regexp stub needs one
// extra root (its C entry code) so it can call back into C; on all other
// configurations the strong root list is just the unconditional one.
#if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
#define STRONG_ROOT_LIST(V)                                                \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                        \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
119
// The full root list: every strong root plus the symbol table, which is
// iterated separately from the strong roots (see IterateWeakRoots below).
#define ROOT_LIST(V)                                                       \
  STRONG_ROOT_LIST(V)                                                      \
  V(SymbolTable, symbol_table, SymbolTable)
123
// Symbols preallocated in the heap at startup.  Each entry is
// V(accessor_name, literal); the list is expanded via X-macro to declare
// the symbol accessors and root indices elsewhere in this header.
#define SYMBOL_LIST(V)                                                     \
  V(Array_symbol, "Array")                                                 \
  V(Object_symbol, "Object")                                               \
  V(Proto_symbol, "__proto__")                                             \
  V(StringImpl_symbol, "StringImpl")                                       \
  V(arguments_symbol, "arguments")                                         \
  V(Arguments_symbol, "Arguments")                                         \
  V(arguments_shadow_symbol, ".arguments")                                 \
  V(call_symbol, "call")                                                   \
  V(apply_symbol, "apply")                                                 \
  V(caller_symbol, "caller")                                               \
  V(boolean_symbol, "boolean")                                             \
  V(Boolean_symbol, "Boolean")                                             \
  V(callee_symbol, "callee")                                               \
  V(constructor_symbol, "constructor")                                     \
  V(code_symbol, ".code")                                                  \
  V(result_symbol, ".result")                                              \
  V(catch_var_symbol, ".catch-var")                                        \
  V(empty_symbol, "")                                                      \
  V(eval_symbol, "eval")                                                   \
  V(function_symbol, "function")                                           \
  V(length_symbol, "length")                                               \
  V(name_symbol, "name")                                                   \
  V(number_symbol, "number")                                               \
  V(Number_symbol, "Number")                                               \
  V(RegExp_symbol, "RegExp")                                               \
  V(object_symbol, "object")                                               \
  V(prototype_symbol, "prototype")                                         \
  V(string_symbol, "string")                                               \
  V(String_symbol, "String")                                               \
  V(Date_symbol, "Date")                                                   \
  V(this_symbol, "this")                                                   \
  V(to_string_symbol, "toString")                                          \
  V(char_at_symbol, "CharAt")                                              \
  V(undefined_symbol, "undefined")                                         \
  V(value_of_symbol, "valueOf")                                            \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                     \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")                 \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                    \
  V(illegal_access_symbol, "illegal access")                               \
  V(out_of_memory_symbol, "out-of-memory")                                 \
  V(illegal_execution_state_symbol, "illegal execution state")             \
  V(get_symbol, "get")                                                     \
  V(set_symbol, "set")                                                     \
  V(function_class_symbol, "Function")                                     \
  V(illegal_argument_symbol, "illegal argument")                           \
  V(MakeReferenceError_symbol, "MakeReferenceError")                       \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                             \
  V(MakeTypeError_symbol, "MakeTypeError")                                 \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")         \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")                 \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")         \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")           \
  V(illegal_return_symbol, "illegal_return")                               \
  V(illegal_break_symbol, "illegal_break")                                 \
  V(illegal_continue_symbol, "illegal_continue")                           \
  V(unknown_label_symbol, "unknown_label")                                 \
  V(redeclaration_symbol, "redeclaration")                                 \
  V(failure_symbol, "<failure>")                                           \
  V(space_symbol, " ")                                                     \
  V(exec_symbol, "exec")                                                   \
  V(zero_symbol, "0")                                                      \
  V(global_eval_symbol, "GlobalEval")                                      \
  V(identity_hash_symbol, "v8::IdentityHash")                              \
  V(closure_symbol, "(closure)")
Steve Blocka7e24c12009-10-30 11:49:00 +0000189
190
// Forward declarations of the GC tracing/statistics classes, so this header
// does not need to include their definitions.
class GCTracer;
class HeapStats;
Steve Blocka7e24c12009-10-30 11:49:00 +0000194
195
// The all-static Heap class captures the interface to the global object heap.
// All JavaScript contexts in this process share the same object heap.

199class Heap : public AllStatic {
200 public:
201 // Configure heap size before setup. Return false if the heap has been
202 // setup already.
Steve Block3ce2e202009-11-05 08:53:23 +0000203 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000204 static bool ConfigureHeapDefault();
205
206 // Initializes the global object heap. If create_heap_objects is true,
207 // also creates the basic non-mutable objects.
208 // Returns whether it succeeded.
209 static bool Setup(bool create_heap_objects);
210
211 // Destroys all memory allocated by the heap.
212 static void TearDown();
213
Steve Blockd0582a62009-12-15 09:54:21 +0000214 // Set the stack limit in the roots_ array. Some architectures generate
215 // code that looks here, because it is faster than loading from the static
216 // jslimit_/real_jslimit_ variable in the StackGuard.
217 static void SetStackLimits();
Steve Blocka7e24c12009-10-30 11:49:00 +0000218
219 // Returns whether Setup has been called.
220 static bool HasBeenSetup();
221
Steve Block3ce2e202009-11-05 08:53:23 +0000222 // Returns the maximum amount of memory reserved for the heap. For
223 // the young generation, we reserve 4 times the amount needed for a
224 // semi space. The young generation consists of two semi spaces and
225 // we reserve twice the amount needed for those in order to ensure
226 // that new space can be aligned to its size.
227 static int MaxReserved() {
228 return 4 * reserved_semispace_size_ + max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000229 }
Steve Block3ce2e202009-11-05 08:53:23 +0000230 static int MaxSemiSpaceSize() { return max_semispace_size_; }
231 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000232 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
Steve Block3ce2e202009-11-05 08:53:23 +0000233 static int MaxOldGenerationSize() { return max_old_generation_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000234
235 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
236 // more spaces are needed until it reaches the limit.
237 static int Capacity();
238
Steve Block3ce2e202009-11-05 08:53:23 +0000239 // Returns the amount of memory currently committed for the heap.
240 static int CommittedMemory();
241
Steve Blocka7e24c12009-10-30 11:49:00 +0000242 // Returns the available bytes in space w/o growing.
243 // Heap doesn't guarantee that it can allocate an object that requires
244 // all available bytes. Check MaxHeapObjectSize() instead.
245 static int Available();
246
247 // Returns the maximum object size in paged space.
248 static inline int MaxObjectSizeInPagedSpace();
249
250 // Returns of size of all objects residing in the heap.
251 static int SizeOfObjects();
252
253 // Return the starting address and a mask for the new space. And-masking an
254 // address with the mask will result in the start address of the new space
255 // for all addresses in either semispace.
256 static Address NewSpaceStart() { return new_space_.start(); }
257 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
258 static Address NewSpaceTop() { return new_space_.top(); }
259
260 static NewSpace* new_space() { return &new_space_; }
261 static OldSpace* old_pointer_space() { return old_pointer_space_; }
262 static OldSpace* old_data_space() { return old_data_space_; }
263 static OldSpace* code_space() { return code_space_; }
264 static MapSpace* map_space() { return map_space_; }
265 static CellSpace* cell_space() { return cell_space_; }
266 static LargeObjectSpace* lo_space() { return lo_space_; }
267
268 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
269 static Address always_allocate_scope_depth_address() {
270 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
271 }
Steve Blockd0582a62009-12-15 09:54:21 +0000272 static bool linear_allocation() {
Leon Clarkee46be812010-01-19 14:06:41 +0000273 return linear_allocation_scope_depth_ != 0;
Steve Blockd0582a62009-12-15 09:54:21 +0000274 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000275
276 static Address* NewSpaceAllocationTopAddress() {
277 return new_space_.allocation_top_address();
278 }
279 static Address* NewSpaceAllocationLimitAddress() {
280 return new_space_.allocation_limit_address();
281 }
282
283 // Uncommit unused semi space.
284 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
285
286#ifdef ENABLE_HEAP_PROTECTION
287 // Protect/unprotect the heap by marking all spaces read-only/writable.
288 static void Protect();
289 static void Unprotect();
290#endif
291
292 // Allocates and initializes a new JavaScript object based on a
293 // constructor.
294 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
295 // failed.
296 // Please note this does not perform a garbage collection.
297 static Object* AllocateJSObject(JSFunction* constructor,
298 PretenureFlag pretenure = NOT_TENURED);
299
300 // Allocates and initializes a new global object based on a constructor.
301 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
302 // failed.
303 // Please note this does not perform a garbage collection.
304 static Object* AllocateGlobalObject(JSFunction* constructor);
305
306 // Returns a deep copy of the JavaScript object.
307 // Properties and elements are copied too.
308 // Returns failure if allocation failed.
309 static Object* CopyJSObject(JSObject* source);
310
311 // Allocates the function prototype.
312 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
313 // failed.
314 // Please note this does not perform a garbage collection.
315 static Object* AllocateFunctionPrototype(JSFunction* function);
316
317 // Reinitialize an JSGlobalProxy based on a constructor. The object
318 // must have the same size as objects allocated using the
319 // constructor. The object is reinitialized and behaves as an
320 // object that has been freshly allocated using the constructor.
321 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
322 JSGlobalProxy* global);
323
324 // Allocates and initializes a new JavaScript object based on a map.
325 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
326 // failed.
327 // Please note this does not perform a garbage collection.
328 static Object* AllocateJSObjectFromMap(Map* map,
329 PretenureFlag pretenure = NOT_TENURED);
330
331 // Allocates a heap object based on the map.
332 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
333 // failed.
334 // Please note this function does not perform a garbage collection.
335 static Object* Allocate(Map* map, AllocationSpace space);
336
337 // Allocates a JS Map in the heap.
338 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
339 // failed.
340 // Please note this function does not perform a garbage collection.
341 static Object* AllocateMap(InstanceType instance_type, int instance_size);
342
343 // Allocates a partial map for bootstrapping.
344 static Object* AllocatePartialMap(InstanceType instance_type,
345 int instance_size);
346
347 // Allocate a map for the specified function
348 static Object* AllocateInitialMap(JSFunction* fun);
349
350 // Allocates and fully initializes a String. There are two String
351 // encodings: ASCII and two byte. One should choose between the three string
352 // allocation functions based on the encoding of the string buffer used to
353 // initialized the string.
354 // - ...FromAscii initializes the string from a buffer that is ASCII
355 // encoded (it does not check that the buffer is ASCII encoded) and the
356 // result will be ASCII encoded.
357 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
358 // encoded. If the characters are all single-byte characters, the
359 // result will be ASCII encoded, otherwise it will converted to two
360 // byte.
361 // - ...FromTwoByte initializes the string from a buffer that is two-byte
362 // encoded. If the characters are all single-byte characters, the
363 // result will be converted to ASCII, otherwise it will be left as
364 // two-byte.
365 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
366 // failed.
367 // Please note this does not perform a garbage collection.
368 static Object* AllocateStringFromAscii(
369 Vector<const char> str,
370 PretenureFlag pretenure = NOT_TENURED);
371 static Object* AllocateStringFromUtf8(
372 Vector<const char> str,
373 PretenureFlag pretenure = NOT_TENURED);
374 static Object* AllocateStringFromTwoByte(
375 Vector<const uc16> str,
376 PretenureFlag pretenure = NOT_TENURED);
377
378 // Allocates a symbol in old space based on the character stream.
379 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
380 // failed.
381 // Please note this function does not perform a garbage collection.
382 static inline Object* AllocateSymbol(Vector<const char> str,
383 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000384 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000385
386 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
387 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000388 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000389
390 static Object* AllocateExternalSymbol(Vector<const char> str,
391 int chars);
392
393
394 // Allocates and partially initializes a String. There are two String
395 // encodings: ASCII and two byte. These functions allocate a string of the
396 // given length and set its map and length fields. The characters of the
397 // string are uninitialized.
398 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
399 // failed.
400 // Please note this does not perform a garbage collection.
401 static Object* AllocateRawAsciiString(
402 int length,
403 PretenureFlag pretenure = NOT_TENURED);
404 static Object* AllocateRawTwoByteString(
405 int length,
406 PretenureFlag pretenure = NOT_TENURED);
407
408 // Computes a single character string where the character has code.
409 // A cache is used for ascii codes.
410 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
411 // failed. Please note this does not perform a garbage collection.
412 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
413
414 // Allocate a byte array of the specified length
415 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
416 // failed.
417 // Please note this does not perform a garbage collection.
418 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
419
420 // Allocate a non-tenured byte array of the specified length
421 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
422 // failed.
423 // Please note this does not perform a garbage collection.
424 static Object* AllocateByteArray(int length);
425
426 // Allocate a pixel array of the specified length
427 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
428 // failed.
429 // Please note this does not perform a garbage collection.
430 static Object* AllocatePixelArray(int length,
431 uint8_t* external_pointer,
432 PretenureFlag pretenure);
433
Steve Block3ce2e202009-11-05 08:53:23 +0000434 // Allocates an external array of the specified length and type.
435 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
436 // failed.
437 // Please note this does not perform a garbage collection.
438 static Object* AllocateExternalArray(int length,
439 ExternalArrayType array_type,
440 void* external_pointer,
441 PretenureFlag pretenure);
442
Steve Blocka7e24c12009-10-30 11:49:00 +0000443 // Allocate a tenured JS global property cell.
444 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
445 // failed.
446 // Please note this does not perform a garbage collection.
447 static Object* AllocateJSGlobalPropertyCell(Object* value);
448
449 // Allocates a fixed array initialized with undefined values
450 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
451 // failed.
452 // Please note this does not perform a garbage collection.
453 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
454 // Allocate uninitialized, non-tenured fixed array with length elements.
455 static Object* AllocateFixedArray(int length);
456
457 // Make a copy of src and return it. Returns
458 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
459 static Object* CopyFixedArray(FixedArray* src);
460
461 // Allocates a fixed array initialized with the hole values.
462 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
463 // failed.
464 // Please note this does not perform a garbage collection.
465 static Object* AllocateFixedArrayWithHoles(int length);
466
467 // AllocateHashTable is identical to AllocateFixedArray except
468 // that the resulting object has hash_table_map as map.
469 static Object* AllocateHashTable(int length);
470
471 // Allocate a global (but otherwise uninitialized) context.
472 static Object* AllocateGlobalContext();
473
474 // Allocate a function context.
475 static Object* AllocateFunctionContext(int length, JSFunction* closure);
476
477 // Allocate a 'with' context.
478 static Object* AllocateWithContext(Context* previous,
479 JSObject* extension,
480 bool is_catch_context);
481
482 // Allocates a new utility object in the old generation.
483 static Object* AllocateStruct(InstanceType type);
484
485 // Allocates a function initialized with a shared part.
486 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
487 // failed.
488 // Please note this does not perform a garbage collection.
489 static Object* AllocateFunction(Map* function_map,
490 SharedFunctionInfo* shared,
Leon Clarkee46be812010-01-19 14:06:41 +0000491 Object* prototype,
492 PretenureFlag pretenure = TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000493
494 // Indicies for direct access into argument objects.
Leon Clarkee46be812010-01-19 14:06:41 +0000495 static const int kArgumentsObjectSize =
496 JSObject::kHeaderSize + 2 * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000497 static const int arguments_callee_index = 0;
498 static const int arguments_length_index = 1;
499
500 // Allocates an arguments object - optionally with an elements array.
501 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
502 // failed.
503 // Please note this does not perform a garbage collection.
504 static Object* AllocateArgumentsObject(Object* callee, int length);
505
506 // Converts a double into either a Smi or a HeapNumber object.
507 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
508 // failed.
509 // Please note this does not perform a garbage collection.
510 static Object* NewNumberFromDouble(double value,
511 PretenureFlag pretenure = NOT_TENURED);
512
513 // Same as NewNumberFromDouble, but may return a preallocated/immutable
514 // number object (e.g., minus_zero_value_, nan_value_)
515 static Object* NumberFromDouble(double value,
516 PretenureFlag pretenure = NOT_TENURED);
517
518 // Allocated a HeapNumber from value.
519 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
520 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
521
522 // Converts an int into either a Smi or a HeapNumber object.
523 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
524 // failed.
525 // Please note this does not perform a garbage collection.
526 static inline Object* NumberFromInt32(int32_t value);
527
528 // Converts an int into either a Smi or a HeapNumber object.
529 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
530 // failed.
531 // Please note this does not perform a garbage collection.
532 static inline Object* NumberFromUint32(uint32_t value);
533
534 // Allocates a new proxy object.
535 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
536 // failed.
537 // Please note this does not perform a garbage collection.
538 static Object* AllocateProxy(Address proxy,
539 PretenureFlag pretenure = NOT_TENURED);
540
541 // Allocates a new SharedFunctionInfo object.
542 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
543 // failed.
544 // Please note this does not perform a garbage collection.
545 static Object* AllocateSharedFunctionInfo(Object* name);
546
547 // Allocates a new cons string object.
548 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
549 // failed.
550 // Please note this does not perform a garbage collection.
551 static Object* AllocateConsString(String* first, String* second);
552
Steve Blocka7e24c12009-10-30 11:49:00 +0000553 // Allocates a new sub string object which is a substring of an underlying
554 // string buffer stretching from the index start (inclusive) to the index
555 // end (exclusive).
556 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
557 // failed.
558 // Please note this does not perform a garbage collection.
559 static Object* AllocateSubString(String* buffer,
560 int start,
561 int end);
562
563 // Allocate a new external string object, which is backed by a string
564 // resource that resides outside the V8 heap.
565 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
566 // failed.
567 // Please note this does not perform a garbage collection.
568 static Object* AllocateExternalStringFromAscii(
569 ExternalAsciiString::Resource* resource);
570 static Object* AllocateExternalStringFromTwoByte(
571 ExternalTwoByteString::Resource* resource);
572
Leon Clarkee46be812010-01-19 14:06:41 +0000573 // Finalizes an external string by deleting the associated external
574 // data and clearing the resource pointer.
575 static inline void FinalizeExternalString(String* string);
576
Steve Blocka7e24c12009-10-30 11:49:00 +0000577 // Allocates an uninitialized object. The memory is non-executable if the
578 // hardware and OS allow.
579 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
580 // failed.
581 // Please note this function does not perform a garbage collection.
582 static inline Object* AllocateRaw(int size_in_bytes,
583 AllocationSpace space,
584 AllocationSpace retry_space);
585
586 // Initialize a filler object to keep the ability to iterate over the heap
587 // when shortening objects.
588 static void CreateFillerObjectAt(Address addr, int size);
589
590 // Makes a new native code object
591 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
592 // failed. On success, the pointer to the Code object is stored in the
593 // self_reference. This allows generated code to reference its own Code
594 // object by containing this pointer.
595 // Please note this function does not perform a garbage collection.
596 static Object* CreateCode(const CodeDesc& desc,
597 ZoneScopeInfo* sinfo,
598 Code::Flags flags,
599 Handle<Object> self_reference);
600
601 static Object* CopyCode(Code* code);
602 // Finds the symbol for string in the symbol table.
603 // If not found, a new symbol is added to the table and returned.
604 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
605 // failed.
606 // Please note this function does not perform a garbage collection.
607 static Object* LookupSymbol(Vector<const char> str);
608 static Object* LookupAsciiSymbol(const char* str) {
609 return LookupSymbol(CStrVector(str));
610 }
611 static Object* LookupSymbol(String* str);
612 static bool LookupSymbolIfExists(String* str, String** symbol);
Steve Blockd0582a62009-12-15 09:54:21 +0000613 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
Steve Blocka7e24c12009-10-30 11:49:00 +0000614
615 // Compute the matching symbol map for a string if possible.
616 // NULL is returned if string is in new space or not flattened.
617 static Map* SymbolMapForString(String* str);
618
619 // Converts the given boolean condition to JavaScript boolean value.
620 static Object* ToBoolean(bool condition) {
621 return condition ? true_value() : false_value();
622 }
623
624 // Code that should be run before and after each GC. Includes some
625 // reporting/verification activities when compiled with DEBUG set.
626 static void GarbageCollectionPrologue();
627 static void GarbageCollectionEpilogue();
628
Steve Blocka7e24c12009-10-30 11:49:00 +0000629 // Performs garbage collection operation.
630 // Returns whether required_space bytes are available after the collection.
631 static bool CollectGarbage(int required_space, AllocationSpace space);
632
633 // Performs a full garbage collection. Force compaction if the
634 // parameter is true.
635 static void CollectAllGarbage(bool force_compaction);
636
637 // Performs a full garbage collection if a context has been disposed
638 // since the last time the check was performed.
639 static void CollectAllGarbageIfContextDisposed();
640
641 // Notify the heap that a context has been disposed.
642 static void NotifyContextDisposed();
643
644 // Utility to invoke the scavenger. This is needed in test code to
645 // ensure correct callback for weak global handles.
646 static void PerformScavenge();
647
648#ifdef DEBUG
649 // Utility used with flag gc-greedy.
650 static bool GarbageCollectionGreedyCheck();
651#endif
652
653 static void SetGlobalGCPrologueCallback(GCCallback callback) {
654 global_gc_prologue_callback_ = callback;
655 }
656 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
657 global_gc_epilogue_callback_ = callback;
658 }
659
  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // name() performs a checked cast; raw_unchecked_##name() uses
  // reinterpret_cast and is therefore safe to call while a GC is in progress.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  static inline type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

// Utility type maps
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  static inline Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

// Checked getters for the symbols in the root list.
#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  // The hidden_symbol is special because it is the empty string, but does
  // not match the empty string.
  static String* hidden_symbol() { return hidden_symbol_; }
689
  // Iterates over all roots in the heap.
  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other (weak) roots in the heap.
  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates remembered set of an old space.
  static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);

  // Iterates a range of remembered set addresses starting with rset_start
  // corresponding to the range of allocated pointers
  // [object_start, object_end).
  // Returns the number of bits that were set.
  static int IterateRSetRange(Address object_start,
                              Address object_end,
                              Address rset_start,
                              ObjectSlotCallback copy_object_func);

  // Returns whether the object resides in new space.
  static inline bool InNewSpace(Object* object);
  static inline bool InFromSpace(Object* object);
  static inline bool InToSpace(Object* object);

  // Checks whether an address/object in the heap (including auxiliary
  // area and unused area).
  static bool Contains(Address addr);
  static bool Contains(HeapObject* value);

  // Checks whether an address/object in a space.
  // Currently used by tests, serialization and heap verification only.
  static bool InSpace(Address addr, AllocationSpace space);
  static bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  static inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  static void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  // Replaces the empty-script entry in the root list.
  static void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  // Update the next script id.
  static inline void SetLastScriptId(Object* last_script_id);

  // Generated code can embed this address to get access to the roots.
  static Object** roots_address() { return roots_; }
747
#ifdef DEBUG
  // Print heap contents / open handles to stdout (debug builds only).
  static void Print();
  static void PrintHandles();

  // Verify the heap is in its normal state before or after a GC.
  static void Verify();

  // Report heap statistics.
  static void ReportHeapStatistics(const char* title);
  static void ReportCodeStatistics(const char* title);

  // Fill in bogus values in from space
  static void ZapFromSpace();
#endif

#if defined(ENABLE_LOGGING_AND_PROFILING)
  // Print short heap statistics.
  static void PrintShortHeapStatistics();
#endif

  // Makes a new symbol object
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  static Object* CreateSymbol(const char* str, int length, int hash);
  static Object* CreateSymbol(String* str);

  // Write barrier support for address[offset] = o.
  static inline void RecordWrite(Address address, int offset);

  // Given an address occupied by a live code object, return that object.
  static Object* FindCodeObject(Address a);

  // Invoke Shrink on shrinkable spaces.
  static void Shrink();
783
  // Coarse GC phase: NOT_IN_GC outside collections, SCAVENGE during a minor
  // collection, MARK_COMPACT during a major one.
  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  static inline HeapState gc_state() { return gc_state_; }

#ifdef DEBUG
  static bool IsAllocationAllowed() { return allocation_allowed_; }
  static inline bool allow_allocation(bool enable);

  static bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  // Debug aids that print the retention path to an object / to a global.
  static void TracePathToObject(Object* target);
  static void TracePathToGlobal();
#endif

  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary, the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Clear a range of remembered set addresses corresponding to the object
  // area address 'start' with size 'size_in_bytes', eg, when adding blocks
  // to the free list.
  static void ClearRSetRange(Address start, int size_in_bytes);

  // Rebuild remembered set in old and map spaces.
  static void RebuildRSets();

  // Update an old object's remembered set.
  static int UpdateRSet(HeapObject* obj);

  // Commits from space if it is uncommitted.
  static void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC.  For paged spaces the space requested must include the
  // space wasted at the end of each page when allocating linearly.
  // NOTE(review): the original comment claimed a true/false result, but the
  // function returns void; presumably it satisfies the reservation itself
  // (possibly by triggering GCs) -- confirm against the definition.
  static void ReserveSpace(
    int new_space_size,
    int pointer_space_size,
    int data_space_size,
    int code_space_size,
    int map_space_size,
    int cell_space_size,
    int large_object_size);
834
  //
  // Support for the API.
  //

  static bool CreateApiObjects();

  // Attempt to find the number in a small cache.  If we find it, return
  // the string representation of the number.  Otherwise return undefined.
  static Object* GetNumberStringCache(Object* number);

  // Update the cache with a new number-string pair.
  static void SetNumberStringCache(Object* number, String* str);

  // Adjusts the amount of registered external memory.
  // Returns the adjusted value.
  static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);

  // Allocate uninitialized fixed array (pretenure == NON_TENURE).
  static Object* AllocateRawFixedArray(int length);

  // True if we have reached the allocation limit in the old generation that
  // should force the next GC (caused normally) to be a full one.
  static bool OldGenerationPromotionLimitReached() {
    return (PromotedSpaceSize() + PromotedExternalMemorySize())
           > old_gen_promotion_limit_;
  }

  // Bytes remaining before the old-generation allocation limit is reached;
  // negative once the limit has been exceeded.
  static intptr_t OldGenerationSpaceAvailable() {
    return old_gen_allocation_limit_ -
           (PromotedSpaceSize() + PromotedExternalMemorySize());
  }

  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now.
  static bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }

  // Can be called when the embedding application is idle.
  static bool IdleNotification();
875
876 // Declare all the root indices.
877 enum RootListIndex {
878#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
879 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
880#undef ROOT_INDEX_DECLARATION
881
882// Utility type maps
883#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
884 STRUCT_LIST(DECLARE_STRUCT_MAP)
885#undef DECLARE_STRUCT_MAP
886
887#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
888 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
889#undef SYMBOL_DECLARATION
890
891 kSymbolTableRootIndex,
892 kStrongRootListLength = kSymbolTableRootIndex,
893 kRootListLength
894 };
895
  // Returns a string corresponding to 'number' (see the number-string
  // cache accessors above).
  static Object* NumberToString(Object* number);

  // Map / root-list index to use for an external array of the given
  // element type.
  static Map* MapForExternalArrayType(ExternalArrayType array_type);
  static RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  // Fills in *stats with heap measurements (see HeapStats below).
  static void RecordStats(HeapStats* stats);
903
 private:
  // Semispace / old-generation / code-range size configuration.
  static int reserved_semispace_size_;
  static int max_semispace_size_;
  static int initial_semispace_size_;
  static int max_old_generation_size_;
  static size_t code_range_size_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  static int survived_since_last_expansion_;

  // Nesting depths maintained by AlwaysAllocateScope and
  // LinearAllocationScope (declared as friends below).
  static int always_allocate_scope_depth_;
  static int linear_allocation_scope_depth_;
  static bool context_disposed_pending_;

#if defined(V8_TARGET_ARCH_X64)
  static const int kMaxObjectSizeInNewSpace = 512*KB;
#else
  static const int kMaxObjectSizeInNewSpace = 256*KB;
#endif

  // The spaces making up the heap.  New space is embedded by value; the
  // paged and large-object spaces are heap-allocated.
  static NewSpace new_space_;
  static OldSpace* old_pointer_space_;
  static OldSpace* old_data_space_;
  static OldSpace* code_space_;
  static MapSpace* map_space_;
  static CellSpace* cell_space_;
  static LargeObjectSpace* lo_space_;
  static HeapState gc_state_;

  // Returns the size of object residing in non new spaces.
  static int PromotedSpaceSize();

  // Returns the amount of external memory registered since last global gc.
  static int PromotedExternalMemorySize();

  static int mc_count_;  // how many mark-compact collections happened
  static int gc_count_;  // how many gc happened

  // Private (unchecked) setters for the roots; only the heap itself and its
  // friends may overwrite root entries.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline void set_##name(type* value) { \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
949
#ifdef DEBUG
  // Toggled by allow_allocation(); checked by IsAllocationAllowed().
  static bool allocation_allowed_;

  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remain until the next failure and garbage collection.
  static int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  static bool disallow_allocation_failure_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke.
  static int old_gen_promotion_limit_;

  // Limit that triggers a global GC as soon as is reasonable.  This is
  // checked before expanding a paged space in the old generation and on
  // every allocation in large object space.
  static int old_gen_allocation_limit_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs.  If reached a global GC is forced.
  static int external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles
  static int amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  static int amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  static int old_gen_exhausted_;

  // The root list; generated code reaches it via roots_address().
  static Object* roots_[kRootListLength];
989
  // Entry describing a string map to create at bootstrap time: its instance
  // type, instance size, and the root-list slot it is stored in.
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  // Entry describing a constant symbol: its character contents and the
  // root-list slot holding the interned string.
  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  // Entry describing a struct map, analogous to StringTypeTable.
  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  // The tables themselves are defined out of line (in the .cc file).
  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];
1010
  // The special hidden symbol which is an empty string, but does not match
  // any string when looked up in properties.
  static String* hidden_symbol_;

  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  static GCCallback global_gc_prologue_callback_;
  static GCCallback global_gc_epilogue_callback_;

  // Checks whether a global GC is necessary
  static GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection
  static void PerformGarbageCollection(AllocationSpace space,
                                       GarbageCollector collector,
                                       GCTracer* tracer);

  // Returns either a Smi or a Number object from 'value'. If 'new_object'
  // is false, it may return a preallocated immutable object.
  static Object* SmiOrNumberFromDouble(double value,
                                       bool new_object,
                                       PretenureFlag pretenure = NOT_TENURED);

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  static inline Object* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  static inline Object* AllocateRawCell();

  // Initializes a JSObject based on its map.
  static void InitializeJSObjectFromMap(JSObject* obj,
                                        FixedArray* properties,
                                        Map* map);

  static bool CreateInitialMaps();
  static bool CreateInitialObjects();

  // These Create*EntryStub functions are here because of a gcc-4.4 bug
  // that assigns wrong vtable entries.
  // (The original comment said "four", but five functions are listed.)
  static void CreateCEntryStub();
  static void CreateCEntryDebugBreakStub();
  static void CreateJSEntryStub();
  static void CreateJSConstructEntryStub();
  static void CreateRegExpCEntryStub();

  static void CreateFixedStubs();
1060
  // Creates one of the singleton oddball values (undefined, null, ...).
  static Object* CreateOddball(Map* map,
                               const char* to_string,
                               Object* to_number);

  // Allocate empty fixed array.
  static Object* AllocateEmptyFixedArray();

  // Performs a minor collection in new generation.
  static void Scavenge();
  // Updates the external string table after a scavenge.
  static void ScavengeExternalStringTable();
  // Processes the scavenge work list; returns the new allocation front.
  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                            Address new_space_front);

  // Performs a major collection in the whole heap.
  static void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  static void MarkCompactPrologue(bool is_compacting);
  static void MarkCompactEpilogue(bool is_compacting);

  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the source
  // object.  Returns the target object.
  static inline HeapObject* MigrateObject(HeapObject* source,
                                          HeapObject* target,
                                          int size);

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  static inline bool ShouldBePromoted(Address old_address, int object_size);
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  // Record the copy of an object in the NewSpace's statistics.
  static void RecordCopiedObject(HeapObject* obj);

  // Record statistics before and after garbage collection.
  static void ReportStatisticsBeforeGC();
  static void ReportStatisticsAfterGC();
#endif

  // Rebuild remembered set in an old space.
  static void RebuildRSets(PagedSpace* space);

  // Rebuild remembered set in the large object space.
  static void RebuildRSets(LargeObjectSpace* space);

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Copy memory from src to dst.
  static inline void CopyBlock(Object** dst, Object** src, int byte_size);

  // Initializes a function with a shared part and prototype.
  // Returns the function.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it.  Specifically, a function that creates
  // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
  // Please note this does not perform a garbage collection.
  static inline Object* InitializeFunction(JSFunction* function,
                                           SharedFunctionInfo* shared,
                                           Object* prototype);

  // Initializes the number to string cache based on the max semispace size.
  static Object* InitializeNumberStringCache();
  // Flush the number to string cache.
  static void FlushNumberStringCache();

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  // Friends that need access to the private root setters and the
  // allocation-scope counters above.
  friend class Factory;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
1137};
1138
1139
// Out-parameter bundle for Heap::RecordStats.  Each field points at
// caller-provided storage that receives one measurement.
// NOTE(review): start_marker/end_marker presumably bracket the record so a
// reader can verify it was written completely -- confirm against
// Heap::RecordStats in the .cc file.
class HeapStats {
 public:
  int *start_marker;
  int *new_space_size;
  int *new_space_capacity;
  int *old_pointer_space_size;
  int *old_pointer_space_capacity;
  int *old_data_space_size;
  int *old_data_space_capacity;
  int *code_space_size;
  int *code_space_capacity;
  int *map_space_size;
  int *map_space_capacity;
  int *cell_space_size;
  int *cell_space_capacity;
  int *lo_space_size;
  int *global_handle_count;
  int *weak_global_handle_count;
  int *pending_global_handle_count;
  int *near_death_global_handle_count;
  int *destroyed_global_handle_count;
  int *end_marker;
};
1163
1164
// RAII scope that increments Heap::always_allocate_scope_depth_ for its
// lifetime.  While the depth is non-zero, heap code elsewhere treats
// allocation as mandatory (the effect of a non-zero depth is implemented
// outside this class).
class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code. The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    Heap::always_allocate_scope_depth_--;
    // Matches the no-nesting assumption checked in the constructor.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};
1181
1182
// RAII scope that increments Heap::linear_allocation_scope_depth_ for its
// lifetime; used together with Heap::ReserveSpace for partial-snapshot
// allocation (see the comment on ReserveSpace).  Unlike AlwaysAllocateScope,
// nesting is permitted (only non-negativity is asserted).
class LinearAllocationScope {
 public:
  LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_++;
  }

  ~LinearAllocationScope() {
    Heap::linear_allocation_scope_depth_--;
    ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
  }
};
1194
1195
Steve Blocka7e24c12009-10-30 11:49:00 +00001196#ifdef DEBUG
1197// Visitor class to verify interior pointers that do not have remembered set
1198// bits. All heap object pointers have to point into the heap to a location
1199// that has a map pointer at its first word. Caveat: Heap::Contains is an
1200// approximation because it can return true for objects in a heap space but
1201// above the allocation pointer.
1202class VerifyPointersVisitor: public ObjectVisitor {
1203 public:
1204 void VisitPointers(Object** start, Object** end) {
1205 for (Object** current = start; current < end; current++) {
1206 if ((*current)->IsHeapObject()) {
1207 HeapObject* object = HeapObject::cast(*current);
1208 ASSERT(Heap::Contains(object));
1209 ASSERT(object->map()->IsMap());
1210 }
1211 }
1212 }
1213};
1214
1215
1216// Visitor class to verify interior pointers that have remembered set bits.
1217// As VerifyPointersVisitor but also checks that remembered set bits are
1218// always set for pointers into new space.
1219class VerifyPointersAndRSetVisitor: public ObjectVisitor {
1220 public:
1221 void VisitPointers(Object** start, Object** end) {
1222 for (Object** current = start; current < end; current++) {
1223 if ((*current)->IsHeapObject()) {
1224 HeapObject* object = HeapObject::cast(*current);
1225 ASSERT(Heap::Contains(object));
1226 ASSERT(object->map()->IsMap());
1227 if (Heap::InNewSpace(object)) {
1228 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
1229 }
1230 }
1231 }
1232 }
1233};
1234#endif
1235
1236
1237// Space iterator for iterating over all spaces of the heap.
1238// Returns each space in turn, and null when it is done.
1239class AllSpaces BASE_EMBEDDED {
1240 public:
1241 Space* next();
1242 AllSpaces() { counter_ = FIRST_SPACE; }
1243 private:
1244 int counter_;
1245};
1246
1247
1248// Space iterator for iterating over all old spaces of the heap: Old pointer
1249// space, old data space and code space.
1250// Returns each space in turn, and null when it is done.
1251class OldSpaces BASE_EMBEDDED {
1252 public:
1253 OldSpace* next();
1254 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1255 private:
1256 int counter_;
1257};
1258
1259
1260// Space iterator for iterating over all the paged spaces of the heap:
Leon Clarkee46be812010-01-19 14:06:41 +00001261// Map space, old pointer space, old data space, code space and cell space.
Steve Blocka7e24c12009-10-30 11:49:00 +00001262// Returns each space in turn, and null when it is done.
1263class PagedSpaces BASE_EMBEDDED {
1264 public:
1265 PagedSpace* next();
1266 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1267 private:
1268 int counter_;
1269};
1270
1271
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // Standard has_next()/next() protocol over per-space object iterators.
  bool has_next();
  ObjectIterator* next();

 private:
  // Creates the object iterator for the space current_space_ refers to.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1289
1290
// A HeapIterator provides iteration over the whole heap. It aggregates the
// specific iterators for the different spaces as these can only iterate over
// one space only.

class HeapIterator BASE_EMBEDDED {
 public:
  explicit HeapIterator();
  virtual ~HeapIterator();

  // Returns the next object, advancing across space boundaries as needed.
  HeapObject* next();
  // Restarts iteration from the beginning of the heap.
  void reset();

 private:
  // Perform the initialization.
  void Init();

  // Perform all necessary shutdown (destruction) work.
  void Shutdown();

  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
1315
1316
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // Cache geometry; kLength must be a power of two for kCapacityMask to
  // work as an index mask.
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  // Hashes the (map, name) pair into a cache index.
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  // Parallel arrays: keys_[i] identifies the entry, field_offsets_[i]
  // holds its cached result.
  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference exposes keys_address()/field_offsets_address() to
  // generated code.
  friend class ExternalReference;
};
Steve Blocka7e24c12009-10-30 11:49:00 +00001356
1357
1358// Cache for mapping (array, property name) into descriptor index.
1359// The cache contains both positive and negative results.
1360// Descriptor index equals kNotFound means the property is absent.
1361// Cleared at startup and prior to any gc.
1362class DescriptorLookupCache {
1363 public:
1364 // Lookup descriptor index for (map, name).
1365 // If absent, kAbsent is returned.
1366 static int Lookup(DescriptorArray* array, String* name) {
1367 if (!StringShape(name).IsSymbol()) return kAbsent;
1368 int index = Hash(array, name);
1369 Key& key = keys_[index];
1370 if ((key.array == array) && (key.name == name)) return results_[index];
1371 return kAbsent;
1372 }
1373
1374 // Update an element in the cache.
1375 static void Update(DescriptorArray* array, String* name, int result) {
1376 ASSERT(result != kAbsent);
1377 if (StringShape(name).IsSymbol()) {
1378 int index = Hash(array, name);
1379 Key& key = keys_[index];
1380 key.array = array;
1381 key.name = name;
1382 results_[index] = result;
1383 }
1384 }
1385
1386 // Clear the cache.
1387 static void Clear();
1388
1389 static const int kAbsent = -2;
1390 private:
1391 static int Hash(DescriptorArray* array, String* name) {
1392 // Uses only lower 32 bits if pointers are larger.
1393 uintptr_t array_hash =
1394 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1395 uintptr_t name_hash =
1396 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1397 return (array_hash ^ name_hash) % kLength;
1398 }
1399
1400 static const int kLength = 64;
1401 struct Key {
1402 DescriptorArray* array;
1403 String* name;
1404 };
1405
1406 static Key keys_[kLength];
1407 static int results_[kLength];
1408};
1409
1410
1411// ----------------------------------------------------------------------------
1412// Marking stack for tracing live objects.
1413
1414class MarkingStack {
1415 public:
1416 void Initialize(Address low, Address high) {
1417 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1418 high_ = reinterpret_cast<HeapObject**>(high);
1419 overflowed_ = false;
1420 }
1421
1422 bool is_full() { return top_ >= high_; }
1423
1424 bool is_empty() { return top_ <= low_; }
1425
1426 bool overflowed() { return overflowed_; }
1427
1428 void clear_overflowed() { overflowed_ = false; }
1429
1430 // Push the (marked) object on the marking stack if there is room,
1431 // otherwise mark the object as overflowed and wait for a rescan of the
1432 // heap.
1433 void Push(HeapObject* object) {
1434 CHECK(object->IsHeapObject());
1435 if (is_full()) {
1436 object->SetOverflow();
1437 overflowed_ = true;
1438 } else {
1439 *(top_++) = object;
1440 }
1441 }
1442
1443 HeapObject* Pop() {
1444 ASSERT(!is_empty());
1445 HeapObject* object = *(--top_);
1446 CHECK(object->IsHeapObject());
1447 return object;
1448 }
1449
1450 private:
1451 HeapObject** low_;
1452 HeapObject** top_;
1453 HeapObject** high_;
1454 bool overflowed_;
1455};
1456
1457
// A helper class to document/test C++ scopes where we do not
// expect a GC. Usage:
//
// /* Allocation not allowed: we cannot handle a GC in this scope. */
// { AssertNoAllocation nogc;
//   ...
// }

#ifdef DEBUG

// RAII scope that sets Heap::disallow_allocation_failure_ for its lifetime;
// used where an allocation failure cannot be handled.  Saves and restores
// the previous state, so nesting is safe.
class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = Heap::disallow_allocation_failure_;
    Heap::disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    Heap::disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;  // Value restored on scope exit.
};

// RAII scope that forbids heap allocation (Heap::allow_allocation(false))
// for its lifetime; restores the previous setting on exit.
class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = Heap::allow_allocation(false);
  }

  ~AssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Value restored on scope exit.
};

// RAII scope that re-enables allocation inside an AssertNoAllocation
// region; restores the previous setting on exit.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = Heap::allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Value restored on scope exit.
};
1508
1509#else // ndef DEBUG
1510
// In release builds the allocation-scope checks compile away to empty
// classes so call sites need no conditional compilation.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1522
1523#endif
1524
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
// Constructed at the start of a collection and destroyed (printing the
// trace line) at the end.
class GCTracer BASE_EMBEDDED {
 public:
  GCTracer();

  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of object in heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  double start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, eg, the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;
};
1589
1590
// A direct-mapped cache of recently computed results for the transcendental
// math functions listed in Type.  Keys are compared on the raw 64-bit
// pattern of the double input, so NaN and -0.0 inputs are matched bitwise.
// Entries hold raw Object pointers, which become stale across GC; the heap
// is expected to call Clear() before collecting (see comment on Clear()).
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.  Lazily allocates the per-function cache on
  // first use.
  static inline Object* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers. This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Looks up 'input' in the direct-mapped table.  On a miss, computes the
  // result and allocates a heap number for it; the colliding slot is only
  // overwritten when that allocation succeeds, so a Failure is returned to
  // the caller but never stored in the cache.
  inline Object* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    // Copy the slot so the two key words are read consistently.
    Element e = elements_[hash];
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      return e.output;
    }
    double answer = Calculate(input);
    Object* heap_number = Heap::AllocateHeapNumber(answer);
    if (!heap_number->IsFailure()) {
      elements_[hash].in[0] = c.integers[0];
      elements_[hash].in[1] = c.integers[1];
      elements_[hash].output = heap_number;
    }
    return heap_number;
  }

  // Computes f(input) for this cache's function type using the C math
  // library.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  // Number of slots per cache; must stay a power of two -- Hash() masks
  // with (kCacheSize - 1).
  static const int kCacheSize = 512;
  struct Element {
    uint32_t in[2];   // The two 32-bit halves of the input double.
    Object* output;   // Cached heap number result (raw pointer).
  };
  // Used to reinterpret a double as two 32-bit words (union type-punning;
  // relies on compiler-supported behavior).
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  // Mixes the two halves of the input and folds the bits down into the
  // table index range.
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= hash >> 16;
    hash ^= hash >> 8;
    return (hash & (kCacheSize - 1));
  }
  static TranscendentalCache* caches_[kNumberOfCaches];  // One per Type.
  Element elements_[kCacheSize];
  Type type_;  // Which math function this cache serves.
};
1673
1674
// External strings table is a place where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  // Applies the visitor to the table's entries (declaration only; see the
  // implementation for the exact traversal).
  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  // Checks the table's internal invariants -- presumably debug-only;
  // confirm against the implementation.
  inline static void Verify();

  // Registers a string in the old-space list directly -- presumably for
  // strings already known to live in old space; confirm against callers.
  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space string are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};
1707
Steve Blocka7e24c12009-10-30 11:49:00 +00001708} } // namespace v8::internal
1709
1710#endif // V8_HEAP_H_