blob: 0dd20c086c8bfe27c327c16c3671079669892471 [file] [log] [blame]
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
33#include "zone-inl.h"
34
35
36namespace v8 {
37namespace internal {
38
// Defines all the roots in Heap.
//
// X-macro: each entry is V(type, camel_case_accessor_name, RootEnumName).
// Expanders use it to generate the root accessors, the RootListIndex enum,
// and the serializer's root table, so the three stay in sync.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V) \
  /* Put the byte array map early. We need it to be in place by the time */ \
  /* the deserializer hits the next page, since it wants to put a byte */ \
  /* array in the unused space at the end of the page. */ \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, stack_limit, StackLimit) \
  V(Object, undefined_value, UndefinedValue) \
  V(Object, the_hole_value, TheHoleValue) \
  V(Object, null_value, NullValue) \
  V(Object, true_value, TrueValue) \
  V(Object, false_value, FalseValue) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, meta_map, MetaMap) \
  V(Object, termination_exception, TerminationException) \
  V(Map, hash_table_map, HashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(Map, string_map, StringMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, ascii_symbol_map, AsciiSymbolMap) \
  V(Map, cons_symbol_map, ConsSymbolMap) \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
  V(Map, external_symbol_map, ExternalSymbolMap) \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, pixel_array_map, PixelArrayMap) \
  V(Map, external_byte_array_map, ExternalByteArrayMap) \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap) \
  V(Map, external_short_array_map, ExternalShortArrayMap) \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
  V(Map, external_int_array_map, ExternalIntArrayMap) \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
  V(Map, external_float_array_map, ExternalFloatArrayMap) \
  V(Map, context_map, ContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, code_map, CodeMap) \
  V(Map, oddball_map, OddballMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, boilerplate_function_map, BoilerplateFunctionMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, proxy_map, ProxyMap) \
  V(Object, nan_value, NanValue) \
  V(Object, minus_zero_value, MinusZeroValue) \
  V(String, empty_string, EmptyString) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(Proxy, prototype_accessors, PrototypeAccessors) \
  V(NumberDictionary, code_stubs, CodeStubs) \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(Code, c_entry_code, CEntryCode) \
  V(Code, c_entry_debug_break_code, CEntryDebugBreakCode) \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Object, last_script_id, LastScriptId) \
  V(Smi, real_stack_limit, RealStackLimit)
// The strong root list. On ARM with the native RegExp engine there is one
// extra root (the RegExp C entry stub); on all other configurations it is
// exactly the unconditional list. Undefined macros evaluate to 0 in #if,
// so non-ARM builds take the #else branch.
#if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
#define STRONG_ROOT_LIST(V) \
  UNCONDITIONAL_STRONG_ROOT_LIST(V) \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
118
// The complete root list: every strong root plus the symbol table, which
// is listed separately from the strong roots.
#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  V(SymbolTable, symbol_table, SymbolTable)
122
// Pre-created symbols stored in the roots array.
//
// X-macro: each entry is V(accessor_name, string_value). The string values
// are part of the runtime's behavior (property names, error strings) and
// must not be altered.
#define SYMBOL_LIST(V) \
  V(Array_symbol, "Array") \
  V(Object_symbol, "Object") \
  V(Proto_symbol, "__proto__") \
  V(StringImpl_symbol, "StringImpl") \
  V(arguments_symbol, "arguments") \
  V(Arguments_symbol, "Arguments") \
  V(arguments_shadow_symbol, ".arguments") \
  V(call_symbol, "call") \
  V(apply_symbol, "apply") \
  V(caller_symbol, "caller") \
  V(boolean_symbol, "boolean") \
  V(Boolean_symbol, "Boolean") \
  V(callee_symbol, "callee") \
  V(constructor_symbol, "constructor") \
  V(code_symbol, ".code") \
  V(result_symbol, ".result") \
  V(catch_var_symbol, ".catch-var") \
  V(empty_symbol, "") \
  V(eval_symbol, "eval") \
  V(function_symbol, "function") \
  V(length_symbol, "length") \
  V(name_symbol, "name") \
  V(number_symbol, "number") \
  V(Number_symbol, "Number") \
  V(RegExp_symbol, "RegExp") \
  V(object_symbol, "object") \
  V(prototype_symbol, "prototype") \
  V(string_symbol, "string") \
  V(String_symbol, "String") \
  V(Date_symbol, "Date") \
  V(this_symbol, "this") \
  V(to_string_symbol, "toString") \
  V(char_at_symbol, "CharAt") \
  V(undefined_symbol, "undefined") \
  V(value_of_symbol, "valueOf") \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
  V(illegal_access_symbol, "illegal access") \
  V(out_of_memory_symbol, "out-of-memory") \
  V(illegal_execution_state_symbol, "illegal execution state") \
  V(get_symbol, "get") \
  V(set_symbol, "set") \
  V(function_class_symbol, "Function") \
  V(illegal_argument_symbol, "illegal argument") \
  V(MakeReferenceError_symbol, "MakeReferenceError") \
  V(MakeSyntaxError_symbol, "MakeSyntaxError") \
  V(MakeTypeError_symbol, "MakeTypeError") \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
  V(illegal_return_symbol, "illegal_return") \
  V(illegal_break_symbol, "illegal_break") \
  V(illegal_continue_symbol, "illegal_continue") \
  V(unknown_label_symbol, "unknown_label") \
  V(redeclaration_symbol, "redeclaration") \
  V(failure_symbol, "<failure>") \
  V(space_symbol, " ") \
  V(exec_symbol, "exec") \
  V(zero_symbol, "0") \
  V(global_eval_symbol, "GlobalEval") \
  V(identity_hash_symbol, "v8::IdentityHash") \
  V(closure_symbol, "(closure)")
Steve Blocka7e24c12009-10-30 11:49:00 +0000188
189
// Forward declarations of the GCTracer and HeapStats classes; only
// pointers/references to them appear in this header.
class GCTracer;
class HeapStats;
Steve Blocka7e24c12009-10-30 11:49:00 +0000193
194
195// The all static Heap captures the interface to the global object heap.
196// All JavaScript contexts by this process share the same object heap.
197
198class Heap : public AllStatic {
199 public:
200 // Configure heap size before setup. Return false if the heap has been
201 // setup already.
Steve Block3ce2e202009-11-05 08:53:23 +0000202 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000203 static bool ConfigureHeapDefault();
204
205 // Initializes the global object heap. If create_heap_objects is true,
206 // also creates the basic non-mutable objects.
207 // Returns whether it succeeded.
208 static bool Setup(bool create_heap_objects);
209
210 // Destroys all memory allocated by the heap.
211 static void TearDown();
212
Steve Blockd0582a62009-12-15 09:54:21 +0000213 // Set the stack limit in the roots_ array. Some architectures generate
214 // code that looks here, because it is faster than loading from the static
215 // jslimit_/real_jslimit_ variable in the StackGuard.
216 static void SetStackLimits();
Steve Blocka7e24c12009-10-30 11:49:00 +0000217
218 // Returns whether Setup has been called.
219 static bool HasBeenSetup();
220
Steve Block3ce2e202009-11-05 08:53:23 +0000221 // Returns the maximum amount of memory reserved for the heap. For
222 // the young generation, we reserve 4 times the amount needed for a
223 // semi space. The young generation consists of two semi spaces and
224 // we reserve twice the amount needed for those in order to ensure
225 // that new space can be aligned to its size.
226 static int MaxReserved() {
227 return 4 * reserved_semispace_size_ + max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000228 }
Steve Block3ce2e202009-11-05 08:53:23 +0000229 static int MaxSemiSpaceSize() { return max_semispace_size_; }
230 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000231 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
Steve Block3ce2e202009-11-05 08:53:23 +0000232 static int MaxOldGenerationSize() { return max_old_generation_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000233
234 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
235 // more spaces are needed until it reaches the limit.
236 static int Capacity();
237
Steve Block3ce2e202009-11-05 08:53:23 +0000238 // Returns the amount of memory currently committed for the heap.
239 static int CommittedMemory();
240
Steve Blocka7e24c12009-10-30 11:49:00 +0000241 // Returns the available bytes in space w/o growing.
242 // Heap doesn't guarantee that it can allocate an object that requires
243 // all available bytes. Check MaxHeapObjectSize() instead.
244 static int Available();
245
246 // Returns the maximum object size in paged space.
247 static inline int MaxObjectSizeInPagedSpace();
248
249 // Returns of size of all objects residing in the heap.
250 static int SizeOfObjects();
251
252 // Return the starting address and a mask for the new space. And-masking an
253 // address with the mask will result in the start address of the new space
254 // for all addresses in either semispace.
255 static Address NewSpaceStart() { return new_space_.start(); }
256 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
257 static Address NewSpaceTop() { return new_space_.top(); }
258
259 static NewSpace* new_space() { return &new_space_; }
260 static OldSpace* old_pointer_space() { return old_pointer_space_; }
261 static OldSpace* old_data_space() { return old_data_space_; }
262 static OldSpace* code_space() { return code_space_; }
263 static MapSpace* map_space() { return map_space_; }
264 static CellSpace* cell_space() { return cell_space_; }
265 static LargeObjectSpace* lo_space() { return lo_space_; }
266
267 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
268 static Address always_allocate_scope_depth_address() {
269 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
270 }
Steve Blockd0582a62009-12-15 09:54:21 +0000271 static bool linear_allocation() {
Leon Clarkee46be812010-01-19 14:06:41 +0000272 return linear_allocation_scope_depth_ != 0;
Steve Blockd0582a62009-12-15 09:54:21 +0000273 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000274
275 static Address* NewSpaceAllocationTopAddress() {
276 return new_space_.allocation_top_address();
277 }
278 static Address* NewSpaceAllocationLimitAddress() {
279 return new_space_.allocation_limit_address();
280 }
281
282 // Uncommit unused semi space.
283 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
284
285#ifdef ENABLE_HEAP_PROTECTION
286 // Protect/unprotect the heap by marking all spaces read-only/writable.
287 static void Protect();
288 static void Unprotect();
289#endif
290
291 // Allocates and initializes a new JavaScript object based on a
292 // constructor.
293 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
294 // failed.
295 // Please note this does not perform a garbage collection.
296 static Object* AllocateJSObject(JSFunction* constructor,
297 PretenureFlag pretenure = NOT_TENURED);
298
299 // Allocates and initializes a new global object based on a constructor.
300 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
301 // failed.
302 // Please note this does not perform a garbage collection.
303 static Object* AllocateGlobalObject(JSFunction* constructor);
304
305 // Returns a deep copy of the JavaScript object.
306 // Properties and elements are copied too.
307 // Returns failure if allocation failed.
308 static Object* CopyJSObject(JSObject* source);
309
310 // Allocates the function prototype.
311 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
312 // failed.
313 // Please note this does not perform a garbage collection.
314 static Object* AllocateFunctionPrototype(JSFunction* function);
315
316 // Reinitialize an JSGlobalProxy based on a constructor. The object
317 // must have the same size as objects allocated using the
318 // constructor. The object is reinitialized and behaves as an
319 // object that has been freshly allocated using the constructor.
320 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
321 JSGlobalProxy* global);
322
323 // Allocates and initializes a new JavaScript object based on a map.
324 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
325 // failed.
326 // Please note this does not perform a garbage collection.
327 static Object* AllocateJSObjectFromMap(Map* map,
328 PretenureFlag pretenure = NOT_TENURED);
329
330 // Allocates a heap object based on the map.
331 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
332 // failed.
333 // Please note this function does not perform a garbage collection.
334 static Object* Allocate(Map* map, AllocationSpace space);
335
336 // Allocates a JS Map in the heap.
337 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
338 // failed.
339 // Please note this function does not perform a garbage collection.
340 static Object* AllocateMap(InstanceType instance_type, int instance_size);
341
342 // Allocates a partial map for bootstrapping.
343 static Object* AllocatePartialMap(InstanceType instance_type,
344 int instance_size);
345
346 // Allocate a map for the specified function
347 static Object* AllocateInitialMap(JSFunction* fun);
348
349 // Allocates and fully initializes a String. There are two String
350 // encodings: ASCII and two byte. One should choose between the three string
351 // allocation functions based on the encoding of the string buffer used to
352 // initialized the string.
353 // - ...FromAscii initializes the string from a buffer that is ASCII
354 // encoded (it does not check that the buffer is ASCII encoded) and the
355 // result will be ASCII encoded.
356 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
357 // encoded. If the characters are all single-byte characters, the
358 // result will be ASCII encoded, otherwise it will converted to two
359 // byte.
360 // - ...FromTwoByte initializes the string from a buffer that is two-byte
361 // encoded. If the characters are all single-byte characters, the
362 // result will be converted to ASCII, otherwise it will be left as
363 // two-byte.
364 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
365 // failed.
366 // Please note this does not perform a garbage collection.
367 static Object* AllocateStringFromAscii(
368 Vector<const char> str,
369 PretenureFlag pretenure = NOT_TENURED);
370 static Object* AllocateStringFromUtf8(
371 Vector<const char> str,
372 PretenureFlag pretenure = NOT_TENURED);
373 static Object* AllocateStringFromTwoByte(
374 Vector<const uc16> str,
375 PretenureFlag pretenure = NOT_TENURED);
376
377 // Allocates a symbol in old space based on the character stream.
378 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
379 // failed.
380 // Please note this function does not perform a garbage collection.
381 static inline Object* AllocateSymbol(Vector<const char> str,
382 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000383 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000384
385 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
386 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000387 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000388
389 static Object* AllocateExternalSymbol(Vector<const char> str,
390 int chars);
391
392
393 // Allocates and partially initializes a String. There are two String
394 // encodings: ASCII and two byte. These functions allocate a string of the
395 // given length and set its map and length fields. The characters of the
396 // string are uninitialized.
397 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
398 // failed.
399 // Please note this does not perform a garbage collection.
400 static Object* AllocateRawAsciiString(
401 int length,
402 PretenureFlag pretenure = NOT_TENURED);
403 static Object* AllocateRawTwoByteString(
404 int length,
405 PretenureFlag pretenure = NOT_TENURED);
406
407 // Computes a single character string where the character has code.
408 // A cache is used for ascii codes.
409 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
410 // failed. Please note this does not perform a garbage collection.
411 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
412
413 // Allocate a byte array of the specified length
414 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
415 // failed.
416 // Please note this does not perform a garbage collection.
417 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
418
419 // Allocate a non-tenured byte array of the specified length
420 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
421 // failed.
422 // Please note this does not perform a garbage collection.
423 static Object* AllocateByteArray(int length);
424
425 // Allocate a pixel array of the specified length
426 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
427 // failed.
428 // Please note this does not perform a garbage collection.
429 static Object* AllocatePixelArray(int length,
430 uint8_t* external_pointer,
431 PretenureFlag pretenure);
432
Steve Block3ce2e202009-11-05 08:53:23 +0000433 // Allocates an external array of the specified length and type.
434 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
435 // failed.
436 // Please note this does not perform a garbage collection.
437 static Object* AllocateExternalArray(int length,
438 ExternalArrayType array_type,
439 void* external_pointer,
440 PretenureFlag pretenure);
441
Steve Blocka7e24c12009-10-30 11:49:00 +0000442 // Allocate a tenured JS global property cell.
443 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
444 // failed.
445 // Please note this does not perform a garbage collection.
446 static Object* AllocateJSGlobalPropertyCell(Object* value);
447
448 // Allocates a fixed array initialized with undefined values
449 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
450 // failed.
451 // Please note this does not perform a garbage collection.
452 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
453 // Allocate uninitialized, non-tenured fixed array with length elements.
454 static Object* AllocateFixedArray(int length);
455
456 // Make a copy of src and return it. Returns
457 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
458 static Object* CopyFixedArray(FixedArray* src);
459
460 // Allocates a fixed array initialized with the hole values.
461 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
462 // failed.
463 // Please note this does not perform a garbage collection.
464 static Object* AllocateFixedArrayWithHoles(int length);
465
466 // AllocateHashTable is identical to AllocateFixedArray except
467 // that the resulting object has hash_table_map as map.
468 static Object* AllocateHashTable(int length);
469
470 // Allocate a global (but otherwise uninitialized) context.
471 static Object* AllocateGlobalContext();
472
473 // Allocate a function context.
474 static Object* AllocateFunctionContext(int length, JSFunction* closure);
475
476 // Allocate a 'with' context.
477 static Object* AllocateWithContext(Context* previous,
478 JSObject* extension,
479 bool is_catch_context);
480
481 // Allocates a new utility object in the old generation.
482 static Object* AllocateStruct(InstanceType type);
483
484 // Allocates a function initialized with a shared part.
485 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
486 // failed.
487 // Please note this does not perform a garbage collection.
488 static Object* AllocateFunction(Map* function_map,
489 SharedFunctionInfo* shared,
Leon Clarkee46be812010-01-19 14:06:41 +0000490 Object* prototype,
491 PretenureFlag pretenure = TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000492
493 // Indicies for direct access into argument objects.
Leon Clarkee46be812010-01-19 14:06:41 +0000494 static const int kArgumentsObjectSize =
495 JSObject::kHeaderSize + 2 * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000496 static const int arguments_callee_index = 0;
497 static const int arguments_length_index = 1;
498
499 // Allocates an arguments object - optionally with an elements array.
500 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
501 // failed.
502 // Please note this does not perform a garbage collection.
503 static Object* AllocateArgumentsObject(Object* callee, int length);
504
505 // Converts a double into either a Smi or a HeapNumber object.
506 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
507 // failed.
508 // Please note this does not perform a garbage collection.
509 static Object* NewNumberFromDouble(double value,
510 PretenureFlag pretenure = NOT_TENURED);
511
512 // Same as NewNumberFromDouble, but may return a preallocated/immutable
513 // number object (e.g., minus_zero_value_, nan_value_)
514 static Object* NumberFromDouble(double value,
515 PretenureFlag pretenure = NOT_TENURED);
516
517 // Allocated a HeapNumber from value.
518 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
519 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
520
521 // Converts an int into either a Smi or a HeapNumber object.
522 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
523 // failed.
524 // Please note this does not perform a garbage collection.
525 static inline Object* NumberFromInt32(int32_t value);
526
527 // Converts an int into either a Smi or a HeapNumber object.
528 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
529 // failed.
530 // Please note this does not perform a garbage collection.
531 static inline Object* NumberFromUint32(uint32_t value);
532
533 // Allocates a new proxy object.
534 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
535 // failed.
536 // Please note this does not perform a garbage collection.
537 static Object* AllocateProxy(Address proxy,
538 PretenureFlag pretenure = NOT_TENURED);
539
540 // Allocates a new SharedFunctionInfo object.
541 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
542 // failed.
543 // Please note this does not perform a garbage collection.
544 static Object* AllocateSharedFunctionInfo(Object* name);
545
546 // Allocates a new cons string object.
547 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
548 // failed.
549 // Please note this does not perform a garbage collection.
550 static Object* AllocateConsString(String* first, String* second);
551
Steve Blocka7e24c12009-10-30 11:49:00 +0000552 // Allocates a new sub string object which is a substring of an underlying
553 // string buffer stretching from the index start (inclusive) to the index
554 // end (exclusive).
555 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
556 // failed.
557 // Please note this does not perform a garbage collection.
558 static Object* AllocateSubString(String* buffer,
559 int start,
560 int end);
561
562 // Allocate a new external string object, which is backed by a string
563 // resource that resides outside the V8 heap.
564 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
565 // failed.
566 // Please note this does not perform a garbage collection.
567 static Object* AllocateExternalStringFromAscii(
568 ExternalAsciiString::Resource* resource);
569 static Object* AllocateExternalStringFromTwoByte(
570 ExternalTwoByteString::Resource* resource);
571
Leon Clarkee46be812010-01-19 14:06:41 +0000572 // Finalizes an external string by deleting the associated external
573 // data and clearing the resource pointer.
574 static inline void FinalizeExternalString(String* string);
575
Steve Blocka7e24c12009-10-30 11:49:00 +0000576 // Allocates an uninitialized object. The memory is non-executable if the
577 // hardware and OS allow.
578 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
579 // failed.
580 // Please note this function does not perform a garbage collection.
581 static inline Object* AllocateRaw(int size_in_bytes,
582 AllocationSpace space,
583 AllocationSpace retry_space);
584
585 // Initialize a filler object to keep the ability to iterate over the heap
586 // when shortening objects.
587 static void CreateFillerObjectAt(Address addr, int size);
588
589 // Makes a new native code object
590 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
591 // failed. On success, the pointer to the Code object is stored in the
592 // self_reference. This allows generated code to reference its own Code
593 // object by containing this pointer.
594 // Please note this function does not perform a garbage collection.
595 static Object* CreateCode(const CodeDesc& desc,
596 ZoneScopeInfo* sinfo,
597 Code::Flags flags,
598 Handle<Object> self_reference);
599
600 static Object* CopyCode(Code* code);
601 // Finds the symbol for string in the symbol table.
602 // If not found, a new symbol is added to the table and returned.
603 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
604 // failed.
605 // Please note this function does not perform a garbage collection.
606 static Object* LookupSymbol(Vector<const char> str);
607 static Object* LookupAsciiSymbol(const char* str) {
608 return LookupSymbol(CStrVector(str));
609 }
610 static Object* LookupSymbol(String* str);
611 static bool LookupSymbolIfExists(String* str, String** symbol);
Steve Blockd0582a62009-12-15 09:54:21 +0000612 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
Steve Blocka7e24c12009-10-30 11:49:00 +0000613
614 // Compute the matching symbol map for a string if possible.
615 // NULL is returned if string is in new space or not flattened.
616 static Map* SymbolMapForString(String* str);
617
618 // Converts the given boolean condition to JavaScript boolean value.
619 static Object* ToBoolean(bool condition) {
620 return condition ? true_value() : false_value();
621 }
622
623 // Code that should be run before and after each GC. Includes some
624 // reporting/verification activities when compiled with DEBUG set.
625 static void GarbageCollectionPrologue();
626 static void GarbageCollectionEpilogue();
627
Steve Blocka7e24c12009-10-30 11:49:00 +0000628 // Performs garbage collection operation.
629 // Returns whether required_space bytes are available after the collection.
630 static bool CollectGarbage(int required_space, AllocationSpace space);
631
632 // Performs a full garbage collection. Force compaction if the
633 // parameter is true.
634 static void CollectAllGarbage(bool force_compaction);
635
636 // Performs a full garbage collection if a context has been disposed
637 // since the last time the check was performed.
638 static void CollectAllGarbageIfContextDisposed();
639
640 // Notify the heap that a context has been disposed.
641 static void NotifyContextDisposed();
642
643 // Utility to invoke the scavenger. This is needed in test code to
644 // ensure correct callback for weak global handles.
645 static void PerformScavenge();
646
647#ifdef DEBUG
648 // Utility used with flag gc-greedy.
649 static bool GarbageCollectionGreedyCheck();
650#endif
651
652 static void SetGlobalGCPrologueCallback(GCCallback callback) {
653 global_gc_prologue_callback_ = callback;
654 }
655 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
656 global_gc_epilogue_callback_ = callback;
657 }
658
659 // Heap root getters. We have versions with and without type::cast() here.
660 // You can't use type::cast during GC because the assert fails.
661#define ROOT_ACCESSOR(type, name, camel_name) \
662 static inline type* name() { \
663 return type::cast(roots_[k##camel_name##RootIndex]); \
664 } \
665 static inline type* raw_unchecked_##name() { \
666 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
667 }
668 ROOT_LIST(ROOT_ACCESSOR)
669#undef ROOT_ACCESSOR
670
671// Utility type maps
672#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
673 static inline Map* name##_map() { \
674 return Map::cast(roots_[k##Name##MapRootIndex]); \
675 }
676 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
677#undef STRUCT_MAP_ACCESSOR
678
679#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
680 return String::cast(roots_[k##name##RootIndex]); \
681 }
682 SYMBOL_LIST(SYMBOL_ACCESSOR)
683#undef SYMBOL_ACCESSOR
684
  // Accessor for the hidden symbol.  It is special because its contents are
  // those of the empty string, but it deliberately does not match the empty
  // string when looked up.
  static String* hidden_symbol() { return hidden_symbol_; }
688
689 // Iterates over all roots in the heap.
Steve Blockd0582a62009-12-15 09:54:21 +0000690 static void IterateRoots(ObjectVisitor* v, VisitMode mode);
Steve Blocka7e24c12009-10-30 11:49:00 +0000691 // Iterates over all strong roots in the heap.
Steve Blockd0582a62009-12-15 09:54:21 +0000692 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
Leon Clarked91b9f72010-01-27 17:25:45 +0000693 // Iterates over all the other roots in the heap.
694 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
Steve Blocka7e24c12009-10-30 11:49:00 +0000695
696 // Iterates remembered set of an old space.
697 static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
698
699 // Iterates a range of remembered set addresses starting with rset_start
700 // corresponding to the range of allocated pointers
701 // [object_start, object_end).
702 // Returns the number of bits that were set.
703 static int IterateRSetRange(Address object_start,
704 Address object_end,
705 Address rset_start,
706 ObjectSlotCallback copy_object_func);
707
708 // Returns whether the object resides in new space.
709 static inline bool InNewSpace(Object* object);
710 static inline bool InFromSpace(Object* object);
711 static inline bool InToSpace(Object* object);
712
713 // Checks whether an address/object in the heap (including auxiliary
714 // area and unused area).
715 static bool Contains(Address addr);
716 static bool Contains(HeapObject* value);
717
718 // Checks whether an address/object in a space.
Steve Blockd0582a62009-12-15 09:54:21 +0000719 // Currently used by tests, serialization and heap verification only.
Steve Blocka7e24c12009-10-30 11:49:00 +0000720 static bool InSpace(Address addr, AllocationSpace space);
721 static bool InSpace(HeapObject* value, AllocationSpace space);
722
723 // Finds out which space an object should get promoted to based on its type.
724 static inline OldSpace* TargetSpace(HeapObject* object);
725 static inline AllocationSpace TargetSpaceId(InstanceType type);
726
  // Replaces the code-stub cache root (only used when expanding the
  // dictionary).  Public (hence the name) so the dictionary code can install
  // the grown copy.
  static void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Replaces the non-monomorphic cache root (only used when expanding the
  // dictionary).
  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }
736
737 // Update the next script id.
738 static inline void SetLastScriptId(Object* last_script_id);
739
  // Address of the root list.  Generated code can embed this address to get
  // direct access to the roots.
  static Object** roots_address() { return roots_; }
742
743#ifdef DEBUG
744 static void Print();
745 static void PrintHandles();
746
747 // Verify the heap is in its normal state before or after a GC.
748 static void Verify();
749
750 // Report heap statistics.
751 static void ReportHeapStatistics(const char* title);
752 static void ReportCodeStatistics(const char* title);
753
754 // Fill in bogus values in from space
755 static void ZapFromSpace();
756#endif
757
758#if defined(ENABLE_LOGGING_AND_PROFILING)
759 // Print short heap statistics.
760 static void PrintShortHeapStatistics();
761#endif
762
763 // Makes a new symbol object
764 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
765 // failed.
766 // Please note this function does not perform a garbage collection.
767 static Object* CreateSymbol(const char* str, int length, int hash);
768 static Object* CreateSymbol(String* str);
769
770 // Write barrier support for address[offset] = o.
771 static inline void RecordWrite(Address address, int offset);
772
773 // Given an address occupied by a live code object, return that object.
774 static Object* FindCodeObject(Address a);
775
776 // Invoke Shrink on shrinkable spaces.
777 static void Shrink();
778
779 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
780 static inline HeapState gc_state() { return gc_state_; }
781
782#ifdef DEBUG
783 static bool IsAllocationAllowed() { return allocation_allowed_; }
784 static inline bool allow_allocation(bool enable);
785
786 static bool disallow_allocation_failure() {
787 return disallow_allocation_failure_;
788 }
789
Leon Clarkee46be812010-01-19 14:06:41 +0000790 static void TracePathToObject(Object* target);
Steve Blocka7e24c12009-10-30 11:49:00 +0000791 static void TracePathToGlobal();
792#endif
793
794 // Callback function passed to Heap::Iterate etc. Copies an object if
795 // necessary, the object might be promoted to an old space. The caller must
796 // ensure the precondition that the object is (a) a heap object and (b) in
797 // the heap's from space.
798 static void ScavengePointer(HeapObject** p);
799 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
800
801 // Clear a range of remembered set addresses corresponding to the object
802 // area address 'start' with size 'size_in_bytes', eg, when adding blocks
803 // to the free list.
804 static void ClearRSetRange(Address start, int size_in_bytes);
805
806 // Rebuild remembered set in old and map spaces.
807 static void RebuildRSets();
808
Leon Clarkee46be812010-01-19 14:06:41 +0000809 // Update an old object's remembered set
810 static int UpdateRSet(HeapObject* obj);
811
Steve Blocka7e24c12009-10-30 11:49:00 +0000812 // Commits from space if it is uncommitted.
813 static void EnsureFromSpaceIsCommitted();
814
Leon Clarkee46be812010-01-19 14:06:41 +0000815 // Support for partial snapshots. After calling this we can allocate a
816 // certain number of bytes using only linear allocation (with a
817 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
818 // or causing a GC. It returns true of space was reserved or false if a GC is
819 // needed. For paged spaces the space requested must include the space wasted
820 // at the end of each page when allocating linearly.
821 static void ReserveSpace(
822 int new_space_size,
823 int pointer_space_size,
824 int data_space_size,
825 int code_space_size,
826 int map_space_size,
827 int cell_space_size,
828 int large_object_size);
829
Steve Blocka7e24c12009-10-30 11:49:00 +0000830 //
831 // Support for the API.
832 //
833
834 static bool CreateApiObjects();
835
836 // Attempt to find the number in a small cache. If we finds it, return
837 // the string representation of the number. Otherwise return undefined.
838 static Object* GetNumberStringCache(Object* number);
839
840 // Update the cache with a new number-string pair.
841 static void SetNumberStringCache(Object* number, String* str);
842
Steve Blocka7e24c12009-10-30 11:49:00 +0000843 // Adjusts the amount of registered external memory.
844 // Returns the adjusted value.
845 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
846
847 // Allocate unitialized fixed array (pretenure == NON_TENURE).
848 static Object* AllocateRawFixedArray(int length);
849
  // True if we have reached the allocation limit in the old generation that
  // should force the next GC (caused normally) to be a full one.
  static bool OldGenerationPromotionLimitReached() {
    return (PromotedSpaceSize() + PromotedExternalMemorySize())
           > old_gen_promotion_limit_;
  }

  // Number of bytes the old generation may still consume (promoted objects
  // plus external memory) before old_gen_allocation_limit_ is exceeded.
  // Negative once the limit has been passed.
  static intptr_t OldGenerationSpaceAvailable() {
    return old_gen_allocation_limit_ -
           (PromotedSpaceSize() + PromotedExternalMemorySize());
  }

  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now.
  static bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }
867
868 // Can be called when the embedding application is idle.
869 static bool IdleNotification();
870
871 // Declare all the root indices.
872 enum RootListIndex {
873#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
874 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
875#undef ROOT_INDEX_DECLARATION
876
877// Utility type maps
878#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
879 STRUCT_LIST(DECLARE_STRUCT_MAP)
880#undef DECLARE_STRUCT_MAP
881
882#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
883 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
884#undef SYMBOL_DECLARATION
885
886 kSymbolTableRootIndex,
887 kStrongRootListLength = kSymbolTableRootIndex,
888 kRootListLength
889 };
890
891 static Object* NumberToString(Object* number);
892
Steve Block3ce2e202009-11-05 08:53:23 +0000893 static Map* MapForExternalArrayType(ExternalArrayType array_type);
894 static RootListIndex RootIndexForExternalArrayType(
895 ExternalArrayType array_type);
896
Steve Blockd0582a62009-12-15 09:54:21 +0000897 static void RecordStats(HeapStats* stats);
898
Steve Blocka7e24c12009-10-30 11:49:00 +0000899 private:
Steve Block3ce2e202009-11-05 08:53:23 +0000900 static int reserved_semispace_size_;
901 static int max_semispace_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000902 static int initial_semispace_size_;
Steve Block3ce2e202009-11-05 08:53:23 +0000903 static int max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000904 static size_t code_range_size_;
905
906 // For keeping track of how much data has survived
907 // scavenge since last new space expansion.
908 static int survived_since_last_expansion_;
909
910 static int always_allocate_scope_depth_;
Steve Blockd0582a62009-12-15 09:54:21 +0000911 static int linear_allocation_scope_depth_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000912 static bool context_disposed_pending_;
913
Steve Blocka7e24c12009-10-30 11:49:00 +0000914#if defined(V8_TARGET_ARCH_X64)
915 static const int kMaxObjectSizeInNewSpace = 512*KB;
916#else
917 static const int kMaxObjectSizeInNewSpace = 256*KB;
918#endif
919
920 static NewSpace new_space_;
921 static OldSpace* old_pointer_space_;
922 static OldSpace* old_data_space_;
923 static OldSpace* code_space_;
924 static MapSpace* map_space_;
925 static CellSpace* cell_space_;
926 static LargeObjectSpace* lo_space_;
927 static HeapState gc_state_;
928
929 // Returns the size of object residing in non new spaces.
930 static int PromotedSpaceSize();
931
932 // Returns the amount of external memory registered since last global gc.
933 static int PromotedExternalMemorySize();
934
935 static int mc_count_; // how many mark-compact collections happened
936 static int gc_count_; // how many gc happened
937
938#define ROOT_ACCESSOR(type, name, camel_name) \
939 static inline void set_##name(type* value) { \
940 roots_[k##camel_name##RootIndex] = value; \
941 }
942 ROOT_LIST(ROOT_ACCESSOR)
943#undef ROOT_ACCESSOR
944
945#ifdef DEBUG
946 static bool allocation_allowed_;
947
948 // If the --gc-interval flag is set to a positive value, this
949 // variable holds the value indicating the number of allocations
950 // remain until the next failure and garbage collection.
951 static int allocation_timeout_;
952
953 // Do we expect to be able to handle allocation failure at this
954 // time?
955 static bool disallow_allocation_failure_;
956#endif // DEBUG
957
958 // Limit that triggers a global GC on the next (normally caused) GC. This
959 // is checked when we have already decided to do a GC to help determine
960 // which collector to invoke.
961 static int old_gen_promotion_limit_;
962
963 // Limit that triggers a global GC as soon as is reasonable. This is
964 // checked before expanding a paged space in the old generation and on
965 // every allocation in large object space.
966 static int old_gen_allocation_limit_;
967
968 // Limit on the amount of externally allocated memory allowed
969 // between global GCs. If reached a global GC is forced.
970 static int external_allocation_limit_;
971
972 // The amount of external memory registered through the API kept alive
973 // by global handles
974 static int amount_of_external_allocated_memory_;
975
976 // Caches the amount of external memory registered at the last global gc.
977 static int amount_of_external_allocated_memory_at_last_global_gc_;
978
979 // Indicates that an allocation has failed in the old generation since the
980 // last GC.
981 static int old_gen_exhausted_;
982
983 static Object* roots_[kRootListLength];
984
985 struct StringTypeTable {
986 InstanceType type;
987 int size;
988 RootListIndex index;
989 };
990
991 struct ConstantSymbolTable {
992 const char* contents;
993 RootListIndex index;
994 };
995
996 struct StructTable {
997 InstanceType type;
998 int size;
999 RootListIndex index;
1000 };
1001
1002 static const StringTypeTable string_type_table[];
1003 static const ConstantSymbolTable constant_symbol_table[];
1004 static const StructTable struct_table[];
1005
1006 // The special hidden symbol which is an empty string, but does not match
1007 // any string when looked up in properties.
1008 static String* hidden_symbol_;
1009
1010 // GC callback function, called before and after mark-compact GC.
1011 // Allocations in the callback function are disallowed.
1012 static GCCallback global_gc_prologue_callback_;
1013 static GCCallback global_gc_epilogue_callback_;
1014
1015 // Checks whether a global GC is necessary
1016 static GarbageCollector SelectGarbageCollector(AllocationSpace space);
1017
1018 // Performs garbage collection
1019 static void PerformGarbageCollection(AllocationSpace space,
1020 GarbageCollector collector,
1021 GCTracer* tracer);
1022
1023 // Returns either a Smi or a Number object from 'value'. If 'new_object'
1024 // is false, it may return a preallocated immutable object.
1025 static Object* SmiOrNumberFromDouble(double value,
1026 bool new_object,
1027 PretenureFlag pretenure = NOT_TENURED);
1028
1029 // Allocate an uninitialized object in map space. The behavior is identical
1030 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1031 // have to test the allocation space argument and (b) can reduce code size
1032 // (since both AllocateRaw and AllocateRawMap are inlined).
1033 static inline Object* AllocateRawMap();
1034
1035 // Allocate an uninitialized object in the global property cell space.
1036 static inline Object* AllocateRawCell();
1037
1038 // Initializes a JSObject based on its map.
1039 static void InitializeJSObjectFromMap(JSObject* obj,
1040 FixedArray* properties,
1041 Map* map);
1042
1043 static bool CreateInitialMaps();
1044 static bool CreateInitialObjects();
1045
1046 // These four Create*EntryStub functions are here because of a gcc-4.4 bug
1047 // that assigns wrong vtable entries.
1048 static void CreateCEntryStub();
1049 static void CreateCEntryDebugBreakStub();
1050 static void CreateJSEntryStub();
1051 static void CreateJSConstructEntryStub();
1052 static void CreateRegExpCEntryStub();
1053
1054 static void CreateFixedStubs();
1055
1056 static Object* CreateOddball(Map* map,
1057 const char* to_string,
1058 Object* to_number);
1059
1060 // Allocate empty fixed array.
1061 static Object* AllocateEmptyFixedArray();
1062
1063 // Performs a minor collection in new generation.
1064 static void Scavenge();
Leon Clarkee46be812010-01-19 14:06:41 +00001065 static void ScavengeExternalStringTable();
1066 static Address DoScavenge(ObjectVisitor* scavenge_visitor,
1067 Address new_space_front);
Steve Blocka7e24c12009-10-30 11:49:00 +00001068
1069 // Performs a major collection in the whole heap.
1070 static void MarkCompact(GCTracer* tracer);
1071
1072 // Code to be run before and after mark-compact.
1073 static void MarkCompactPrologue(bool is_compacting);
1074 static void MarkCompactEpilogue(bool is_compacting);
1075
1076 // Helper function used by CopyObject to copy a source object to an
1077 // allocated target object and update the forwarding pointer in the source
1078 // object. Returns the target object.
Leon Clarkee46be812010-01-19 14:06:41 +00001079 static inline HeapObject* MigrateObject(HeapObject* source,
1080 HeapObject* target,
1081 int size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001082
1083 // Helper function that governs the promotion policy from new space to
1084 // old. If the object's old address lies below the new space's age
1085 // mark or if we've already filled the bottom 1/16th of the to space,
1086 // we try to promote this object.
1087 static inline bool ShouldBePromoted(Address old_address, int object_size);
1088#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1089 // Record the copy of an object in the NewSpace's statistics.
1090 static void RecordCopiedObject(HeapObject* obj);
1091
1092 // Record statistics before and after garbage collection.
1093 static void ReportStatisticsBeforeGC();
1094 static void ReportStatisticsAfterGC();
1095#endif
1096
Steve Blocka7e24c12009-10-30 11:49:00 +00001097 // Rebuild remembered set in an old space.
1098 static void RebuildRSets(PagedSpace* space);
1099
1100 // Rebuild remembered set in the large object space.
1101 static void RebuildRSets(LargeObjectSpace* space);
1102
1103 // Slow part of scavenge object.
1104 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1105
1106 // Copy memory from src to dst.
1107 static inline void CopyBlock(Object** dst, Object** src, int byte_size);
1108
1109 // Initializes a function with a shared part and prototype.
1110 // Returns the function.
1111 // Note: this code was factored out of AllocateFunction such that
1112 // other parts of the VM could use it. Specifically, a function that creates
1113 // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1114 // Please note this does not perform a garbage collection.
1115 static inline Object* InitializeFunction(JSFunction* function,
1116 SharedFunctionInfo* shared,
1117 Object* prototype);
1118
Leon Clarkee46be812010-01-19 14:06:41 +00001119
1120 // Initializes the number to string cache based on the max semispace size.
1121 static Object* InitializeNumberStringCache();
1122 // Flush the number to string cache.
1123 static void FlushNumberStringCache();
1124
Steve Blocka7e24c12009-10-30 11:49:00 +00001125 static const int kInitialSymbolTableSize = 2048;
1126 static const int kInitialEvalCacheSize = 64;
1127
1128 friend class Factory;
1129 friend class DisallowAllocationFailure;
1130 friend class AlwaysAllocateScope;
Steve Blockd0582a62009-12-15 09:54:21 +00001131 friend class LinearAllocationScope;
1132};
1133
1134
// Out-parameter bundle filled in by Heap::RecordStats.  Each field points at
// an externally owned int slot that receives the corresponding statistic.
// start_marker/end_marker presumably bracket the record with sentinel values
// so a dump can be validated — TODO(review): confirm against RecordStats.
class HeapStats {
 public:
  int *start_marker;                      // sentinel written first
  int *new_space_size;                    // sizes/capacities are in bytes —
  int *new_space_capacity;                //   verify against RecordStats
  int *old_pointer_space_size;
  int *old_pointer_space_capacity;
  int *old_data_space_size;
  int *old_data_space_capacity;
  int *code_space_size;
  int *code_space_capacity;
  int *map_space_size;
  int *map_space_capacity;
  int *cell_space_size;
  int *cell_space_capacity;
  int *lo_space_size;
  int *global_handle_count;               // global handle counts by state
  int *weak_global_handle_count;
  int *pending_global_handle_count;
  int *near_death_global_handle_count;
  int *destroyed_global_handle_count;
  int *end_marker;                        // sentinel written last
};
1158
1159
1160class AlwaysAllocateScope {
1161 public:
1162 AlwaysAllocateScope() {
1163 // We shouldn't hit any nested scopes, because that requires
1164 // non-handle code to call handle code. The code still works but
1165 // performance will degrade, so we want to catch this situation
1166 // in debug mode.
1167 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1168 Heap::always_allocate_scope_depth_++;
1169 }
1170
1171 ~AlwaysAllocateScope() {
1172 Heap::always_allocate_scope_depth_--;
1173 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1174 }
1175};
1176
1177
Steve Blockd0582a62009-12-15 09:54:21 +00001178class LinearAllocationScope {
1179 public:
1180 LinearAllocationScope() {
1181 Heap::linear_allocation_scope_depth_++;
1182 }
1183
1184 ~LinearAllocationScope() {
1185 Heap::linear_allocation_scope_depth_--;
1186 ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
1187 }
1188};
1189
1190
Steve Blocka7e24c12009-10-30 11:49:00 +00001191#ifdef DEBUG
1192// Visitor class to verify interior pointers that do not have remembered set
1193// bits. All heap object pointers have to point into the heap to a location
1194// that has a map pointer at its first word. Caveat: Heap::Contains is an
1195// approximation because it can return true for objects in a heap space but
1196// above the allocation pointer.
1197class VerifyPointersVisitor: public ObjectVisitor {
1198 public:
1199 void VisitPointers(Object** start, Object** end) {
1200 for (Object** current = start; current < end; current++) {
1201 if ((*current)->IsHeapObject()) {
1202 HeapObject* object = HeapObject::cast(*current);
1203 ASSERT(Heap::Contains(object));
1204 ASSERT(object->map()->IsMap());
1205 }
1206 }
1207 }
1208};
1209
1210
1211// Visitor class to verify interior pointers that have remembered set bits.
1212// As VerifyPointersVisitor but also checks that remembered set bits are
1213// always set for pointers into new space.
1214class VerifyPointersAndRSetVisitor: public ObjectVisitor {
1215 public:
1216 void VisitPointers(Object** start, Object** end) {
1217 for (Object** current = start; current < end; current++) {
1218 if ((*current)->IsHeapObject()) {
1219 HeapObject* object = HeapObject::cast(*current);
1220 ASSERT(Heap::Contains(object));
1221 ASSERT(object->map()->IsMap());
1222 if (Heap::InNewSpace(object)) {
1223 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
1224 }
1225 }
1226 }
1227 }
1228};
1229#endif
1230
1231
1232// Space iterator for iterating over all spaces of the heap.
1233// Returns each space in turn, and null when it is done.
1234class AllSpaces BASE_EMBEDDED {
1235 public:
1236 Space* next();
1237 AllSpaces() { counter_ = FIRST_SPACE; }
1238 private:
1239 int counter_;
1240};
1241
1242
1243// Space iterator for iterating over all old spaces of the heap: Old pointer
1244// space, old data space and code space.
1245// Returns each space in turn, and null when it is done.
1246class OldSpaces BASE_EMBEDDED {
1247 public:
1248 OldSpace* next();
1249 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1250 private:
1251 int counter_;
1252};
1253
1254
1255// Space iterator for iterating over all the paged spaces of the heap:
Leon Clarkee46be812010-01-19 14:06:41 +00001256// Map space, old pointer space, old data space, code space and cell space.
Steve Blocka7e24c12009-10-30 11:49:00 +00001257// Returns each space in turn, and null when it is done.
1258class PagedSpaces BASE_EMBEDDED {
1259 public:
1260 PagedSpace* next();
1261 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1262 private:
1263 int counter_;
1264};
1265
1266
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // True while there are spaces left to visit.
  bool has_next();
  // Returns an object iterator over the next space; owned by this iterator.
  ObjectIterator* next();

 private:
  // Creates the object iterator for the current space.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1284
1285
1286// A HeapIterator provides iteration over the whole heap It aggregates a the
1287// specific iterators for the different spaces as these can only iterate over
1288// one space only.
1289
1290class HeapIterator BASE_EMBEDDED {
1291 public:
1292 explicit HeapIterator();
1293 virtual ~HeapIterator();
1294
Steve Blocka7e24c12009-10-30 11:49:00 +00001295 HeapObject* next();
1296 void reset();
1297
1298 private:
1299 // Perform the initialization.
1300 void Init();
1301
1302 // Perform all necessary shutdown (destruction) work.
1303 void Shutdown();
1304
1305 // Space iterator for iterating all the spaces.
1306 SpaceIterator* space_iterator_;
1307 // Object iterator for the space currently being iterated.
1308 ObjectIterator* object_iterator_;
1309};
1310
1311
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // Cache geometry: kLength entries, power of two so kCapacityMask can be
  // used for wrap-around; kMapHashShift is used when hashing the map pointer.
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  // Hashes (map, name) to an index in [0, kLength).
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  // One cache key: the receiver's map and the property name.
  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference exposes keys_address()/field_offsets_address() to
  // generated code.
  friend class ExternalReference;
};
Steve Blocka7e24c12009-10-30 11:49:00 +00001351
1352
1353// Cache for mapping (array, property name) into descriptor index.
1354// The cache contains both positive and negative results.
1355// Descriptor index equals kNotFound means the property is absent.
1356// Cleared at startup and prior to any gc.
1357class DescriptorLookupCache {
1358 public:
1359 // Lookup descriptor index for (map, name).
1360 // If absent, kAbsent is returned.
1361 static int Lookup(DescriptorArray* array, String* name) {
1362 if (!StringShape(name).IsSymbol()) return kAbsent;
1363 int index = Hash(array, name);
1364 Key& key = keys_[index];
1365 if ((key.array == array) && (key.name == name)) return results_[index];
1366 return kAbsent;
1367 }
1368
1369 // Update an element in the cache.
1370 static void Update(DescriptorArray* array, String* name, int result) {
1371 ASSERT(result != kAbsent);
1372 if (StringShape(name).IsSymbol()) {
1373 int index = Hash(array, name);
1374 Key& key = keys_[index];
1375 key.array = array;
1376 key.name = name;
1377 results_[index] = result;
1378 }
1379 }
1380
1381 // Clear the cache.
1382 static void Clear();
1383
1384 static const int kAbsent = -2;
1385 private:
1386 static int Hash(DescriptorArray* array, String* name) {
1387 // Uses only lower 32 bits if pointers are larger.
1388 uintptr_t array_hash =
1389 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1390 uintptr_t name_hash =
1391 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1392 return (array_hash ^ name_hash) % kLength;
1393 }
1394
1395 static const int kLength = 64;
1396 struct Key {
1397 DescriptorArray* array;
1398 String* name;
1399 };
1400
1401 static Key keys_[kLength];
1402 static int results_[kLength];
1403};
1404
1405
1406// ----------------------------------------------------------------------------
1407// Marking stack for tracing live objects.
1408
1409class MarkingStack {
1410 public:
1411 void Initialize(Address low, Address high) {
1412 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1413 high_ = reinterpret_cast<HeapObject**>(high);
1414 overflowed_ = false;
1415 }
1416
1417 bool is_full() { return top_ >= high_; }
1418
1419 bool is_empty() { return top_ <= low_; }
1420
1421 bool overflowed() { return overflowed_; }
1422
1423 void clear_overflowed() { overflowed_ = false; }
1424
1425 // Push the (marked) object on the marking stack if there is room,
1426 // otherwise mark the object as overflowed and wait for a rescan of the
1427 // heap.
1428 void Push(HeapObject* object) {
1429 CHECK(object->IsHeapObject());
1430 if (is_full()) {
1431 object->SetOverflow();
1432 overflowed_ = true;
1433 } else {
1434 *(top_++) = object;
1435 }
1436 }
1437
1438 HeapObject* Pop() {
1439 ASSERT(!is_empty());
1440 HeapObject* object = *(--top_);
1441 CHECK(object->IsHeapObject());
1442 return object;
1443 }
1444
1445 private:
1446 HeapObject** low_;
1447 HeapObject** top_;
1448 HeapObject** high_;
1449 bool overflowed_;
1450};
1451
1452
1453// A helper class to document/test C++ scopes where we do not
1454// expect a GC. Usage:
1455//
1456// /* Allocation not allowed: we cannot handle a GC in this scope. */
1457// { AssertNoAllocation nogc;
1458// ...
1459// }
1460
1461#ifdef DEBUG
1462
1463class DisallowAllocationFailure {
1464 public:
1465 DisallowAllocationFailure() {
1466 old_state_ = Heap::disallow_allocation_failure_;
1467 Heap::disallow_allocation_failure_ = true;
1468 }
1469 ~DisallowAllocationFailure() {
1470 Heap::disallow_allocation_failure_ = old_state_;
1471 }
1472 private:
1473 bool old_state_;
1474};
1475
1476class AssertNoAllocation {
1477 public:
1478 AssertNoAllocation() {
1479 old_state_ = Heap::allow_allocation(false);
1480 }
1481
1482 ~AssertNoAllocation() {
1483 Heap::allow_allocation(old_state_);
1484 }
1485
1486 private:
1487 bool old_state_;
1488};
1489
1490class DisableAssertNoAllocation {
1491 public:
1492 DisableAssertNoAllocation() {
1493 old_state_ = Heap::allow_allocation(true);
1494 }
1495
1496 ~DisableAssertNoAllocation() {
1497 Heap::allow_allocation(old_state_);
1498 }
1499
1500 private:
1501 bool old_state_;
1502};
1503
1504#else // ndef DEBUG
1505
// Release-build stand-in: the no-allocation assertion is a no-op here so
// callers can use the scope unconditionally.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};
1511
// Release-build stand-in: re-enabling allocation is a no-op here so callers
// can use the scope unconditionally.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1517
1518#endif
1519
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  GCTracer();

  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of objects in the heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  double start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, e.g., the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;
};
1584
1585
// A per-function memoization cache for transcendental math functions
// (acos, asin, atan, cos, exp, log, sin, tan).  One cache instance exists
// per Type, created lazily on first use and stored in caches_.
//
// Cache entries hold raw Object* results (heap numbers), so they become
// stale across a garbage collection; Clear() must be called before GC to
// drop them (see comment on Clear below).
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.  Lazily instantiates the per-type cache on
  // first use.
  static inline Object* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers. This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Looks up 'input' by the bit pattern of the double (via the Converter
  // union); on a hit returns the cached heap number, otherwise computes
  // the value, allocates a heap number for it, and overwrites the slot
  // (direct-mapped cache: a collision simply replaces the old entry).
  // On allocation failure the Failure object is returned and the cache
  // slot is left untouched.
  inline Object* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    // Compare both 32-bit halves of the double's bit pattern; this
    // distinguishes values (like +0.0/-0.0) that compare equal as doubles.
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      return e.output;
    }
    double answer = Calculate(input);
    Object* heap_number = Heap::AllocateHeapNumber(answer);
    if (!heap_number->IsFailure()) {
      elements_[hash].in[0] = c.integers[0];
      elements_[hash].in[1] = c.integers[1];
      elements_[hash].output = heap_number;
    }
    return heap_number;
  }

  // Dispatches to the <math.h> function selected by type_.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  static const int kCacheSize = 512;
  // A direct-mapped cache slot: the two 32-bit halves of the input's bit
  // pattern and the cached heap-number result.
  struct Element {
    uint32_t in[2];
    Object* output;
  };
  // Reinterprets a double's bit pattern as two 32-bit integers for
  // hashing and exact-bit comparison.
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  // Mixes the two halves of the double's bit pattern and folds the upper
  // bits down so the result indexes into the kCacheSize-entry (power of
  // two) table.
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= hash >> 16;
    hash ^= hash >> 8;
    return (hash & (kCacheSize - 1));
  }
  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;
};
1668
1669
Leon Clarkee46be812010-01-19 14:06:41 +00001670// External strings table is a place where all external strings are
1671// registered. We need to keep track of such strings to properly
1672// finalize them.
1673class ExternalStringTable : public AllStatic {
1674 public:
1675 // Registers an external string.
1676 inline static void AddString(String* string);
1677
1678 inline static void Iterate(ObjectVisitor* v);
1679
1680 // Restores internal invariant and gets rid of collected strings.
1681 // Must be called after each Iterate() that modified the strings.
1682 static void CleanUp();
1683
1684 // Destroys all allocated memory.
1685 static void TearDown();
1686
1687 private:
1688 friend class Heap;
1689
1690 inline static void Verify();
1691
1692 inline static void AddOldString(String* string);
1693
1694 // Notifies the table that only a prefix of the new list is valid.
1695 inline static void ShrinkNewStrings(int position);
1696
1697 // To speed up scavenge collections new space string are kept
1698 // separate from old space strings.
1699 static List<Object*> new_space_strings_;
1700 static List<Object*> old_space_strings_;
1701};
1702
Steve Blocka7e24c12009-10-30 11:49:00 +00001703} } // namespace v8::internal
1704
1705#endif // V8_HEAP_H_