blob: 74e5a31b190f46c73b842b8c29a7dc7c7b1b3e0f [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2008 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
Steve Block6ded16b2010-05-10 14:33:55 +010033#include "splay-tree-inl.h"
34#include "v8-counters.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
// Forward declarations.
class ZoneScopeInfo;  // Scope info passed to Heap::CreateCode (defined elsewhere).
41
// Defines all the strong roots in Heap that are present in every build
// configuration.  This is an X-macro list: each entry expands
// V(type, name, CamelName), where 'type' is the root's static C++ type,
// 'name' is the accessor/field name, and 'CamelName' is the constant used
// for the root's index in the root list.
// NOTE: entry order is significant -- see the serializer and cache-line
// comments below.  Do not reorder entries casually.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V)                                 \
  /* Put the byte array map early.  We need it to be in place by the time */ \
  /* the deserializer hits the next page, since it wants to put a byte */ \
  /* array in the unused space at the end of the page. */                 \
  V(Map, byte_array_map, ByteArrayMap)                                    \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                     \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                     \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, stack_limit, StackLimit)                                         \
  V(Object, undefined_value, UndefinedValue)                              \
  V(Object, the_hole_value, TheHoleValue)                                 \
  V(Object, null_value, NullValue)                                        \
  V(Object, true_value, TrueValue)                                        \
  V(Object, false_value, FalseValue)                                      \
  V(Map, heap_number_map, HeapNumberMap)                                  \
  V(Map, global_context_map, GlobalContextMap)                            \
  V(Map, fixed_array_map, FixedArrayMap)                                  \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)  \
  V(Map, meta_map, MetaMap)                                               \
  V(Object, termination_exception, TerminationException)                  \
  V(Map, hash_table_map, HashTableMap)                                    \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                       \
  /* String/symbol maps: sequential, cons, and external variants, each */ \
  /* in two-byte and ascii flavours. */                                   \
  V(Map, string_map, StringMap)                                           \
  V(Map, ascii_string_map, AsciiStringMap)                                \
  V(Map, symbol_map, SymbolMap)                                           \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                \
  V(Map, cons_symbol_map, ConsSymbolMap)                                  \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                       \
  V(Map, external_symbol_map, ExternalSymbolMap)                          \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)               \
  V(Map, cons_string_map, ConsStringMap)                                  \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                       \
  V(Map, external_string_map, ExternalStringMap)                          \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)               \
  V(Map, undetectable_string_map, UndetectableStringMap)                  \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)       \
  /* Maps for pixel and external (typed) arrays. */                       \
  V(Map, pixel_array_map, PixelArrayMap)                                  \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                   \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)  \
  V(Map, external_short_array_map, ExternalShortArrayMap)                 \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap) \
  V(Map, external_int_array_map, ExternalIntArrayMap)                     \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)    \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                 \
  V(Map, context_map, ContextMap)                                         \
  V(Map, catch_context_map, CatchContextMap)                              \
  V(Map, code_map, CodeMap)                                               \
  V(Map, oddball_map, OddballMap)                                         \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                 \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                 \
  V(Map, proxy_map, ProxyMap)                                             \
  /* Preallocated special numbers. */                                     \
  V(Object, nan_value, NanValue)                                          \
  V(Object, minus_zero_value, MinusZeroValue)                             \
  /* One-element instanceof result cache (see ClearInstanceofCache). */   \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)           \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                     \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)               \
  V(String, empty_string, EmptyString)                                    \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)        \
  V(Map, neander_map, NeanderMap)                                         \
  V(JSObject, message_listeners, MessageListeners)                        \
  V(Proxy, prototype_accessors, PrototypeAccessors)                       \
  V(NumberDictionary, code_stubs, CodeStubs)                              \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)         \
  /* Entry code objects. */                                               \
  V(Code, js_entry_code, JsEntryCode)                                     \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                  \
  V(Code, c_entry_code, CEntryCode)                                       \
  V(FixedArray, number_string_cache, NumberStringCache)                   \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, natives_source_cache, NativesSourceCache)                 \
  V(Object, last_script_id, LastScriptId)                                 \
  V(Script, empty_script, EmptyScript)                                    \
  V(Smi, real_stack_limit, RealStackLimit)                                \

// On ARM with the native (non-interpreted) regexp engine, generated regexp
// code re-enters C++ through a dedicated stub; that stub's Code object is
// kept alive as an additional strong root.
#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#define STRONG_ROOT_LIST(V)                                               \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                       \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
123
// The complete root list: all strong roots plus the symbol table, which is
// listed separately from STRONG_ROOT_LIST — presumably so the GC can visit
// it specially rather than as an ordinary strong root (the visiting policy
// is defined in the GC implementation, not in this header).
#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(SymbolTable, symbol_table, SymbolTable)
127
// X-macro list of symbols (interned strings) preallocated in the heap at
// startup.  Each entry expands V(accessor_name, "literal"): 'accessor_name'
// becomes the name used to access the symbol root, and "literal" is the
// exact string contents (do NOT edit the literals -- they are runtime
// values, several deliberately unusual: leading '.', empty string, etc.).
#define SYMBOL_LIST(V)                                                    \
  V(Array_symbol, "Array")                                                \
  V(Object_symbol, "Object")                                              \
  V(Proto_symbol, "__proto__")                                            \
  V(StringImpl_symbol, "StringImpl")                                      \
  V(arguments_symbol, "arguments")                                        \
  V(Arguments_symbol, "Arguments")                                        \
  V(arguments_shadow_symbol, ".arguments")                                \
  V(call_symbol, "call")                                                  \
  V(apply_symbol, "apply")                                                \
  V(caller_symbol, "caller")                                              \
  V(boolean_symbol, "boolean")                                            \
  V(Boolean_symbol, "Boolean")                                            \
  V(callee_symbol, "callee")                                              \
  V(constructor_symbol, "constructor")                                    \
  V(code_symbol, ".code")                                                 \
  V(result_symbol, ".result")                                             \
  V(catch_var_symbol, ".catch-var")                                       \
  V(empty_symbol, "")                                                     \
  V(eval_symbol, "eval")                                                  \
  V(function_symbol, "function")                                          \
  V(length_symbol, "length")                                              \
  V(name_symbol, "name")                                                  \
  V(number_symbol, "number")                                              \
  V(Number_symbol, "Number")                                              \
  V(RegExp_symbol, "RegExp")                                              \
  V(source_symbol, "source")                                              \
  V(global_symbol, "global")                                              \
  V(ignore_case_symbol, "ignoreCase")                                     \
  V(multiline_symbol, "multiline")                                        \
  V(input_symbol, "input")                                                \
  V(index_symbol, "index")                                                \
  V(last_index_symbol, "lastIndex")                                       \
  V(object_symbol, "object")                                              \
  V(prototype_symbol, "prototype")                                        \
  V(string_symbol, "string")                                              \
  V(String_symbol, "String")                                              \
  V(Date_symbol, "Date")                                                  \
  V(this_symbol, "this")                                                  \
  V(to_string_symbol, "toString")                                         \
  V(char_at_symbol, "CharAt")                                             \
  V(undefined_symbol, "undefined")                                        \
  V(value_of_symbol, "valueOf")                                           \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                    \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")                \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                   \
  V(illegal_access_symbol, "illegal access")                              \
  V(out_of_memory_symbol, "out-of-memory")                                \
  V(illegal_execution_state_symbol, "illegal execution state")            \
  V(get_symbol, "get")                                                    \
  V(set_symbol, "set")                                                    \
  V(function_class_symbol, "Function")                                    \
  V(illegal_argument_symbol, "illegal argument")                          \
  V(MakeReferenceError_symbol, "MakeReferenceError")                      \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                            \
  V(MakeTypeError_symbol, "MakeTypeError")                                \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")        \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")                \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")        \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")          \
  V(illegal_return_symbol, "illegal_return")                              \
  V(illegal_break_symbol, "illegal_break")                                \
  V(illegal_continue_symbol, "illegal_continue")                          \
  V(unknown_label_symbol, "unknown_label")                                \
  V(redeclaration_symbol, "redeclaration")                                \
  V(failure_symbol, "<failure>")                                          \
  V(space_symbol, " ")                                                    \
  V(exec_symbol, "exec")                                                  \
  V(zero_symbol, "0")                                                     \
  V(global_eval_symbol, "GlobalEval")                                     \
  V(identity_hash_symbol, "v8::IdentityHash")                             \
  V(closure_symbol, "(closure)")
Steve Blocka7e24c12009-10-30 11:49:00 +0000200
201
// Forward declarations of GC support classes defined elsewhere.
class GCTracer;
class HeapStats;


// Callback used when updating the external string table: given the location
// of a pointer to an external string, returns the String the slot should now
// refer to.  NOTE(review): the exact contract (e.g. behaviour for strings
// that died during GC) is defined by the implementations outside this file
// -- confirm against the callers before relying on it.
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
208
209
Steve Blocka7e24c12009-10-30 11:49:00 +0000210// The all static Heap captures the interface to the global object heap.
211// All JavaScript contexts by this process share the same object heap.
212
213class Heap : public AllStatic {
214 public:
215 // Configure heap size before setup. Return false if the heap has been
216 // setup already.
Steve Block3ce2e202009-11-05 08:53:23 +0000217 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000218 static bool ConfigureHeapDefault();
219
220 // Initializes the global object heap. If create_heap_objects is true,
221 // also creates the basic non-mutable objects.
222 // Returns whether it succeeded.
223 static bool Setup(bool create_heap_objects);
224
225 // Destroys all memory allocated by the heap.
226 static void TearDown();
227
Steve Blockd0582a62009-12-15 09:54:21 +0000228 // Set the stack limit in the roots_ array. Some architectures generate
229 // code that looks here, because it is faster than loading from the static
230 // jslimit_/real_jslimit_ variable in the StackGuard.
231 static void SetStackLimits();
Steve Blocka7e24c12009-10-30 11:49:00 +0000232
233 // Returns whether Setup has been called.
234 static bool HasBeenSetup();
235
Steve Block3ce2e202009-11-05 08:53:23 +0000236 // Returns the maximum amount of memory reserved for the heap. For
237 // the young generation, we reserve 4 times the amount needed for a
238 // semi space. The young generation consists of two semi spaces and
239 // we reserve twice the amount needed for those in order to ensure
240 // that new space can be aligned to its size.
241 static int MaxReserved() {
242 return 4 * reserved_semispace_size_ + max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000243 }
Steve Block3ce2e202009-11-05 08:53:23 +0000244 static int MaxSemiSpaceSize() { return max_semispace_size_; }
245 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000246 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
Steve Block3ce2e202009-11-05 08:53:23 +0000247 static int MaxOldGenerationSize() { return max_old_generation_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000248
249 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
250 // more spaces are needed until it reaches the limit.
251 static int Capacity();
252
Steve Block3ce2e202009-11-05 08:53:23 +0000253 // Returns the amount of memory currently committed for the heap.
254 static int CommittedMemory();
255
Steve Blocka7e24c12009-10-30 11:49:00 +0000256 // Returns the available bytes in space w/o growing.
257 // Heap doesn't guarantee that it can allocate an object that requires
258 // all available bytes. Check MaxHeapObjectSize() instead.
259 static int Available();
260
261 // Returns the maximum object size in paged space.
262 static inline int MaxObjectSizeInPagedSpace();
263
264 // Returns of size of all objects residing in the heap.
265 static int SizeOfObjects();
266
267 // Return the starting address and a mask for the new space. And-masking an
268 // address with the mask will result in the start address of the new space
269 // for all addresses in either semispace.
270 static Address NewSpaceStart() { return new_space_.start(); }
271 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
272 static Address NewSpaceTop() { return new_space_.top(); }
273
274 static NewSpace* new_space() { return &new_space_; }
275 static OldSpace* old_pointer_space() { return old_pointer_space_; }
276 static OldSpace* old_data_space() { return old_data_space_; }
277 static OldSpace* code_space() { return code_space_; }
278 static MapSpace* map_space() { return map_space_; }
279 static CellSpace* cell_space() { return cell_space_; }
280 static LargeObjectSpace* lo_space() { return lo_space_; }
281
282 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
283 static Address always_allocate_scope_depth_address() {
284 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
285 }
Steve Blockd0582a62009-12-15 09:54:21 +0000286 static bool linear_allocation() {
Leon Clarkee46be812010-01-19 14:06:41 +0000287 return linear_allocation_scope_depth_ != 0;
Steve Blockd0582a62009-12-15 09:54:21 +0000288 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000289
290 static Address* NewSpaceAllocationTopAddress() {
291 return new_space_.allocation_top_address();
292 }
293 static Address* NewSpaceAllocationLimitAddress() {
294 return new_space_.allocation_limit_address();
295 }
296
297 // Uncommit unused semi space.
298 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
299
300#ifdef ENABLE_HEAP_PROTECTION
301 // Protect/unprotect the heap by marking all spaces read-only/writable.
302 static void Protect();
303 static void Unprotect();
304#endif
305
306 // Allocates and initializes a new JavaScript object based on a
307 // constructor.
308 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
309 // failed.
310 // Please note this does not perform a garbage collection.
311 static Object* AllocateJSObject(JSFunction* constructor,
312 PretenureFlag pretenure = NOT_TENURED);
313
314 // Allocates and initializes a new global object based on a constructor.
315 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
316 // failed.
317 // Please note this does not perform a garbage collection.
318 static Object* AllocateGlobalObject(JSFunction* constructor);
319
320 // Returns a deep copy of the JavaScript object.
321 // Properties and elements are copied too.
322 // Returns failure if allocation failed.
323 static Object* CopyJSObject(JSObject* source);
324
325 // Allocates the function prototype.
326 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
327 // failed.
328 // Please note this does not perform a garbage collection.
329 static Object* AllocateFunctionPrototype(JSFunction* function);
330
331 // Reinitialize an JSGlobalProxy based on a constructor. The object
332 // must have the same size as objects allocated using the
333 // constructor. The object is reinitialized and behaves as an
334 // object that has been freshly allocated using the constructor.
335 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
336 JSGlobalProxy* global);
337
338 // Allocates and initializes a new JavaScript object based on a map.
339 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
340 // failed.
341 // Please note this does not perform a garbage collection.
342 static Object* AllocateJSObjectFromMap(Map* map,
343 PretenureFlag pretenure = NOT_TENURED);
344
345 // Allocates a heap object based on the map.
346 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
347 // failed.
348 // Please note this function does not perform a garbage collection.
349 static Object* Allocate(Map* map, AllocationSpace space);
350
351 // Allocates a JS Map in the heap.
352 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
353 // failed.
354 // Please note this function does not perform a garbage collection.
355 static Object* AllocateMap(InstanceType instance_type, int instance_size);
356
357 // Allocates a partial map for bootstrapping.
358 static Object* AllocatePartialMap(InstanceType instance_type,
359 int instance_size);
360
361 // Allocate a map for the specified function
362 static Object* AllocateInitialMap(JSFunction* fun);
363
Steve Block6ded16b2010-05-10 14:33:55 +0100364 // Allocates an empty code cache.
365 static Object* AllocateCodeCache();
366
Kristian Monsen25f61362010-05-21 11:50:48 +0100367 // Clear the Instanceof cache (used when a prototype changes).
368 static void ClearInstanceofCache() {
369 set_instanceof_cache_function(the_hole_value());
370 }
371
Steve Blocka7e24c12009-10-30 11:49:00 +0000372 // Allocates and fully initializes a String. There are two String
373 // encodings: ASCII and two byte. One should choose between the three string
374 // allocation functions based on the encoding of the string buffer used to
375 // initialized the string.
376 // - ...FromAscii initializes the string from a buffer that is ASCII
377 // encoded (it does not check that the buffer is ASCII encoded) and the
378 // result will be ASCII encoded.
379 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
380 // encoded. If the characters are all single-byte characters, the
381 // result will be ASCII encoded, otherwise it will converted to two
382 // byte.
383 // - ...FromTwoByte initializes the string from a buffer that is two-byte
384 // encoded. If the characters are all single-byte characters, the
385 // result will be converted to ASCII, otherwise it will be left as
386 // two-byte.
387 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
388 // failed.
389 // Please note this does not perform a garbage collection.
390 static Object* AllocateStringFromAscii(
391 Vector<const char> str,
392 PretenureFlag pretenure = NOT_TENURED);
393 static Object* AllocateStringFromUtf8(
394 Vector<const char> str,
395 PretenureFlag pretenure = NOT_TENURED);
396 static Object* AllocateStringFromTwoByte(
397 Vector<const uc16> str,
398 PretenureFlag pretenure = NOT_TENURED);
399
400 // Allocates a symbol in old space based on the character stream.
401 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
402 // failed.
403 // Please note this function does not perform a garbage collection.
404 static inline Object* AllocateSymbol(Vector<const char> str,
405 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000406 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000407
408 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
409 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000410 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000411
412 static Object* AllocateExternalSymbol(Vector<const char> str,
413 int chars);
414
415
416 // Allocates and partially initializes a String. There are two String
417 // encodings: ASCII and two byte. These functions allocate a string of the
418 // given length and set its map and length fields. The characters of the
419 // string are uninitialized.
420 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
421 // failed.
422 // Please note this does not perform a garbage collection.
423 static Object* AllocateRawAsciiString(
424 int length,
425 PretenureFlag pretenure = NOT_TENURED);
426 static Object* AllocateRawTwoByteString(
427 int length,
428 PretenureFlag pretenure = NOT_TENURED);
429
430 // Computes a single character string where the character has code.
431 // A cache is used for ascii codes.
432 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
433 // failed. Please note this does not perform a garbage collection.
434 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
435
436 // Allocate a byte array of the specified length
437 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
438 // failed.
439 // Please note this does not perform a garbage collection.
440 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
441
442 // Allocate a non-tenured byte array of the specified length
443 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
444 // failed.
445 // Please note this does not perform a garbage collection.
446 static Object* AllocateByteArray(int length);
447
448 // Allocate a pixel array of the specified length
449 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
450 // failed.
451 // Please note this does not perform a garbage collection.
452 static Object* AllocatePixelArray(int length,
453 uint8_t* external_pointer,
454 PretenureFlag pretenure);
455
Steve Block3ce2e202009-11-05 08:53:23 +0000456 // Allocates an external array of the specified length and type.
457 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
458 // failed.
459 // Please note this does not perform a garbage collection.
460 static Object* AllocateExternalArray(int length,
461 ExternalArrayType array_type,
462 void* external_pointer,
463 PretenureFlag pretenure);
464
Steve Blocka7e24c12009-10-30 11:49:00 +0000465 // Allocate a tenured JS global property cell.
466 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
467 // failed.
468 // Please note this does not perform a garbage collection.
469 static Object* AllocateJSGlobalPropertyCell(Object* value);
470
471 // Allocates a fixed array initialized with undefined values
472 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
473 // failed.
474 // Please note this does not perform a garbage collection.
475 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
Steve Block6ded16b2010-05-10 14:33:55 +0100476 // Allocates a fixed array initialized with undefined values
Steve Blocka7e24c12009-10-30 11:49:00 +0000477 static Object* AllocateFixedArray(int length);
478
Steve Block6ded16b2010-05-10 14:33:55 +0100479 // Allocates an uninitialized fixed array. It must be filled by the caller.
480 //
481 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
482 // failed.
483 // Please note this does not perform a garbage collection.
484 static Object* AllocateUninitializedFixedArray(int length);
485
Steve Blocka7e24c12009-10-30 11:49:00 +0000486 // Make a copy of src and return it. Returns
487 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
488 static Object* CopyFixedArray(FixedArray* src);
489
490 // Allocates a fixed array initialized with the hole values.
491 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
492 // failed.
493 // Please note this does not perform a garbage collection.
Steve Block6ded16b2010-05-10 14:33:55 +0100494 static Object* AllocateFixedArrayWithHoles(
495 int length,
496 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000497
498 // AllocateHashTable is identical to AllocateFixedArray except
499 // that the resulting object has hash_table_map as map.
Steve Block6ded16b2010-05-10 14:33:55 +0100500 static Object* AllocateHashTable(int length,
501 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000502
503 // Allocate a global (but otherwise uninitialized) context.
504 static Object* AllocateGlobalContext();
505
506 // Allocate a function context.
507 static Object* AllocateFunctionContext(int length, JSFunction* closure);
508
509 // Allocate a 'with' context.
510 static Object* AllocateWithContext(Context* previous,
511 JSObject* extension,
512 bool is_catch_context);
513
514 // Allocates a new utility object in the old generation.
515 static Object* AllocateStruct(InstanceType type);
516
517 // Allocates a function initialized with a shared part.
518 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
519 // failed.
520 // Please note this does not perform a garbage collection.
521 static Object* AllocateFunction(Map* function_map,
522 SharedFunctionInfo* shared,
Leon Clarkee46be812010-01-19 14:06:41 +0000523 Object* prototype,
524 PretenureFlag pretenure = TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000525
526 // Indicies for direct access into argument objects.
Leon Clarkee46be812010-01-19 14:06:41 +0000527 static const int kArgumentsObjectSize =
528 JSObject::kHeaderSize + 2 * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000529 static const int arguments_callee_index = 0;
530 static const int arguments_length_index = 1;
531
532 // Allocates an arguments object - optionally with an elements array.
533 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
534 // failed.
535 // Please note this does not perform a garbage collection.
536 static Object* AllocateArgumentsObject(Object* callee, int length);
537
Steve Blocka7e24c12009-10-30 11:49:00 +0000538 // Same as NewNumberFromDouble, but may return a preallocated/immutable
539 // number object (e.g., minus_zero_value_, nan_value_)
540 static Object* NumberFromDouble(double value,
541 PretenureFlag pretenure = NOT_TENURED);
542
543 // Allocated a HeapNumber from value.
544 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
545 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
546
547 // Converts an int into either a Smi or a HeapNumber object.
548 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
549 // failed.
550 // Please note this does not perform a garbage collection.
551 static inline Object* NumberFromInt32(int32_t value);
552
553 // Converts an int into either a Smi or a HeapNumber object.
554 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
555 // failed.
556 // Please note this does not perform a garbage collection.
557 static inline Object* NumberFromUint32(uint32_t value);
558
559 // Allocates a new proxy object.
560 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
561 // failed.
562 // Please note this does not perform a garbage collection.
563 static Object* AllocateProxy(Address proxy,
564 PretenureFlag pretenure = NOT_TENURED);
565
566 // Allocates a new SharedFunctionInfo object.
567 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
568 // failed.
569 // Please note this does not perform a garbage collection.
570 static Object* AllocateSharedFunctionInfo(Object* name);
571
572 // Allocates a new cons string object.
573 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
574 // failed.
575 // Please note this does not perform a garbage collection.
576 static Object* AllocateConsString(String* first, String* second);
577
Steve Blocka7e24c12009-10-30 11:49:00 +0000578 // Allocates a new sub string object which is a substring of an underlying
579 // string buffer stretching from the index start (inclusive) to the index
580 // end (exclusive).
581 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
582 // failed.
583 // Please note this does not perform a garbage collection.
584 static Object* AllocateSubString(String* buffer,
585 int start,
Steve Block6ded16b2010-05-10 14:33:55 +0100586 int end,
587 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000588
589 // Allocate a new external string object, which is backed by a string
590 // resource that resides outside the V8 heap.
591 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
592 // failed.
593 // Please note this does not perform a garbage collection.
594 static Object* AllocateExternalStringFromAscii(
595 ExternalAsciiString::Resource* resource);
596 static Object* AllocateExternalStringFromTwoByte(
597 ExternalTwoByteString::Resource* resource);
598
Leon Clarkee46be812010-01-19 14:06:41 +0000599 // Finalizes an external string by deleting the associated external
600 // data and clearing the resource pointer.
601 static inline void FinalizeExternalString(String* string);
602
Steve Blocka7e24c12009-10-30 11:49:00 +0000603 // Allocates an uninitialized object. The memory is non-executable if the
604 // hardware and OS allow.
605 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
606 // failed.
607 // Please note this function does not perform a garbage collection.
608 static inline Object* AllocateRaw(int size_in_bytes,
609 AllocationSpace space,
610 AllocationSpace retry_space);
611
612 // Initialize a filler object to keep the ability to iterate over the heap
613 // when shortening objects.
614 static void CreateFillerObjectAt(Address addr, int size);
615
616 // Makes a new native code object
617 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
618 // failed. On success, the pointer to the Code object is stored in the
619 // self_reference. This allows generated code to reference its own Code
620 // object by containing this pointer.
621 // Please note this function does not perform a garbage collection.
622 static Object* CreateCode(const CodeDesc& desc,
623 ZoneScopeInfo* sinfo,
624 Code::Flags flags,
625 Handle<Object> self_reference);
626
627 static Object* CopyCode(Code* code);
Steve Block6ded16b2010-05-10 14:33:55 +0100628
629 // Copy the code and scope info part of the code object, but insert
630 // the provided data as the relocation information.
631 static Object* CopyCode(Code* code, Vector<byte> reloc_info);
632
  // Finds the symbol for string in the symbol table.
  // If not found, a new symbol is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  static Object* LookupSymbol(Vector<const char> str);
  // Convenience overload for NUL-terminated ASCII C strings.
  static Object* LookupAsciiSymbol(const char* str) {
    return LookupSymbol(CStrVector(str));
  }
  static Object* LookupSymbol(String* str);
  // Returns true and stores the existing symbol through *symbol if str is
  // already in the symbol table; does not add a new symbol.
  static bool LookupSymbolIfExists(String* str, String** symbol);
  static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);

  // Compute the matching symbol map for a string if possible.
  // NULL is returned if string is in new space or not flattened.
  static Map* SymbolMapForString(String* str);

  // Tries to flatten a string before compare operation.
  //
  // Returns a failure in case it was decided that flattening was
  // necessary and failed.  Note, if flattening is not necessary the
  // string might stay non-flat even when not a failure is returned.
  //
  // Please note this function does not perform a garbage collection.
  static inline Object* PrepareForCompare(String* str);
658
Steve Blocka7e24c12009-10-30 11:49:00 +0000659 // Converts the given boolean condition to JavaScript boolean value.
660 static Object* ToBoolean(bool condition) {
661 return condition ? true_value() : false_value();
662 }
663
  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  static void GarbageCollectionPrologue();
  static void GarbageCollectionEpilogue();

  // Performs garbage collection operation.
  // Returns whether required_space bytes are available after the collection.
  static bool CollectGarbage(int required_space, AllocationSpace space);

  // Performs a full garbage collection.  Force compaction if the
  // parameter is true.
  static void CollectAllGarbage(bool force_compaction);

  // Notify the heap that a context has been disposed.  Returns the new
  // (post-increment) count of disposed contexts.
  static int NotifyContextDisposed() { return ++contexts_disposed_; }

  // Utility to invoke the scavenger.  This is needed in test code to
  // ensure correct callback for weak global handles.
  static void PerformScavenge();

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  static bool GarbageCollectionGreedyCheck();
#endif
688
Steve Block6ded16b2010-05-10 14:33:55 +0100689 static void AddGCPrologueCallback(
690 GCEpilogueCallback callback, GCType gc_type_filter);
691 static void RemoveGCPrologueCallback(GCEpilogueCallback callback);
692
693 static void AddGCEpilogueCallback(
694 GCEpilogueCallback callback, GCType gc_type_filter);
695 static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
696
  // Sets (or clears, when callback == NULL) the single global callback run
  // around mark-compact GCs (see global_gc_prologue_callback_).  The XOR
  // ASSERT enforces strict set/clear alternation: exactly one of the new
  // callback and the currently stored one must be NULL.
  static void SetGlobalGCPrologueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;
  }
  static void SetGlobalGCEpilogueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
  }
705
  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  static inline type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

// Utility type maps: one name##_map() getter per struct in STRUCT_LIST.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  static inline Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

// One getter per predefined symbol in SYMBOL_LIST.
#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  // The hidden_symbol is special because it is the empty string, but does
  // not match the empty string.
  static String* hidden_symbol() { return hidden_symbol_; }
735
  // Iterates over all roots (strong and weak) in the heap.
  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other (weak) roots in the heap.
  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates remembered set of an old space.
  static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);

  // Iterates a range of remembered set addresses starting with rset_start
  // corresponding to the range of allocated pointers
  // [object_start, object_end).
  // Returns the number of bits that were set.
  static int IterateRSetRange(Address object_start,
                              Address object_end,
                              Address rset_start,
                              ObjectSlotCallback copy_object_func);
754
  // Returns whether the object resides in new space (or its from/to
  // semispace, respectively).
  static inline bool InNewSpace(Object* object);
  static inline bool InFromSpace(Object* object);
  static inline bool InToSpace(Object* object);

  // Checks whether an address/object is in the heap (including auxiliary
  // area and unused area).
  static bool Contains(Address addr);
  static bool Contains(HeapObject* value);

  // Checks whether an address/object is in a specific space.
  // Currently used by tests, serialization and heap verification only.
  static bool InSpace(Address addr, AllocationSpace space);
  static bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  static inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);
773
  // Sets the stub_cache_ (only used when expanding the dictionary).
  static void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  // Replaces the empty-script root object.
  static void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  // Update the next script id.
  static inline void SetLastScriptId(Object* last_script_id);

  // Generated code can embed this address to get access to the roots.
  static Object** roots_address() { return roots_; }
793
#ifdef DEBUG
  // Debug-only heap dumps.
  static void Print();
  static void PrintHandles();

  // Verify the heap is in its normal state before or after a GC.
  static void Verify();

  // Report heap statistics.
  static void ReportHeapStatistics(const char* title);
  static void ReportCodeStatistics(const char* title);

  // Fill in bogus values in from space, so stale references are caught.
  static void ZapFromSpace();
#endif

#if defined(ENABLE_LOGGING_AND_PROFILING)
  // Print short heap statistics.
  static void PrintShortHeapStatistics();
#endif
813
  // Makes a new symbol object
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  static Object* CreateSymbol(const char* str, int length, int hash);
  static Object* CreateSymbol(String* str);

  // Write barrier support for address[offset] = o.
  static inline void RecordWrite(Address address, int offset);

  // Write barrier support for address[start : start + len[ = o.
  static inline void RecordWrites(Address address, int start, int len);

  // Given an address occupied by a live code object, return that object.
  static Object* FindCodeObject(Address a);

  // Invoke Shrink on shrinkable spaces.
  static void Shrink();

  // Current phase of the collector; NOT_IN_GC outside of collections.
  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  static inline HeapState gc_state() { return gc_state_; }
835
#ifdef DEBUG
  // Debug-only toggles guarding allocation during GC (see allocation_allowed_).
  static bool IsAllocationAllowed() { return allocation_allowed_; }
  static inline bool allow_allocation(bool enable);

  // Do we expect to be able to handle allocation failure at this time?
  static bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  // Debug aids: print a retaining path from a root to the given object /
  // to the global object.
  static void TracePathToObject(Object* target);
  static void TracePathToGlobal();
#endif
847
  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary, the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Clear a range of remembered set addresses corresponding to the object
  // area address 'start' with size 'size_in_bytes', eg, when adding blocks
  // to the free list.
  static void ClearRSetRange(Address start, int size_in_bytes);

  // Rebuild remembered set in old and map spaces.
  static void RebuildRSets();

  // Update an old object's remembered set
  static int UpdateRSet(HeapObject* obj);

  // Commits from space if it is uncommitted.
  static void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC.  For paged spaces the space requested must include the
  // space wasted at the end of each page when allocating linearly.
  // NOTE(review): an older comment claimed a bool result ("returns true of
  // space was reserved"), but this function returns void -- confirm in
  // heap.cc how callers learn that a GC is needed.
  static void ReserveSpace(
    int new_space_size,
    int pointer_space_size,
    int data_space_size,
    int code_space_size,
    int map_space_size,
    int cell_space_size,
    int large_object_size);
883
Steve Blocka7e24c12009-10-30 11:49:00 +0000884 //
885 // Support for the API.
886 //
887
888 static bool CreateApiObjects();
889
890 // Attempt to find the number in a small cache. If we finds it, return
891 // the string representation of the number. Otherwise return undefined.
892 static Object* GetNumberStringCache(Object* number);
893
894 // Update the cache with a new number-string pair.
895 static void SetNumberStringCache(Object* number, String* str);
896
Steve Blocka7e24c12009-10-30 11:49:00 +0000897 // Adjusts the amount of registered external memory.
898 // Returns the adjusted value.
899 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
900
  // Allocate uninitialized fixed array.
  static Object* AllocateRawFixedArray(int length);
  static Object* AllocateRawFixedArray(int length,
                                       PretenureFlag pretenure);

  // True if we have reached the allocation limit in the old generation that
  // should force the next GC (caused normally) to be a full one.
  static bool OldGenerationPromotionLimitReached() {
    return (PromotedSpaceSize() + PromotedExternalMemorySize())
           > old_gen_promotion_limit_;
  }

  // Bytes left before old_gen_allocation_limit_ is hit; negative when the
  // limit has been exceeded.
  static intptr_t OldGenerationSpaceAvailable() {
    return old_gen_allocation_limit_ -
           (PromotedSpaceSize() + PromotedExternalMemorySize());
  }

  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now.
  static bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }

  // Can be called when the embedding application is idle.
  static bool IdleNotification();
926
927 // Declare all the root indices.
928 enum RootListIndex {
929#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
930 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
931#undef ROOT_INDEX_DECLARATION
932
933// Utility type maps
934#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
935 STRUCT_LIST(DECLARE_STRUCT_MAP)
936#undef DECLARE_STRUCT_MAP
937
938#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
939 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
940#undef SYMBOL_DECLARATION
941
942 kSymbolTableRootIndex,
943 kStrongRootListLength = kSymbolTableRootIndex,
944 kRootListLength
945 };
946
  // Returns the string representation of number, consulting (and filling)
  // the number-string cache unless check_number_string_cache is false.
  static Object* NumberToString(Object* number,
                                bool check_number_string_cache = true);

  static Map* MapForExternalArrayType(ExternalArrayType array_type);
  static RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  // Fills *stats with the current heap figures (see HeapStats).
  static void RecordStats(HeapStats* stats);

  // Copy block of memory from src to dst.  Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Object** dst, Object** src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes and
  // pointer size aligned addresses.
  static inline void MoveBlock(Object** dst, Object** src, int byte_size);

  // Check new space expansion criteria and expand semispaces if it was hit.
  static void CheckNewSpaceExpansionCriteria();

  // Accumulates scavenge survivors into survived_since_last_expansion_.
  static inline void IncrementYoungSurvivorsCounter(int survived) {
    survived_since_last_expansion_ += survived;
  }

  static void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  static inline bool ShouldBePromoted(Address old_address, int object_size);

  static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }

  static void ClearJSFunctionResultCaches();

  static GCTracer* tracer() { return tracer_; }
985
 private:
  // Semispace / old-generation size configuration (set at heap setup).
  static int reserved_semispace_size_;
  static int max_semispace_size_;
  static int initial_semispace_size_;
  static int max_old_generation_size_;
  static size_t code_range_size_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  static int survived_since_last_expansion_;

  // Nesting depths of AlwaysAllocateScope / LinearAllocationScope.
  static int always_allocate_scope_depth_;
  static int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  static int contexts_disposed_;

#if defined(V8_TARGET_ARCH_X64)
  static const int kMaxObjectSizeInNewSpace = 512*KB;
#else
  static const int kMaxObjectSizeInNewSpace = 256*KB;
#endif

  // The spaces making up the heap.  new_space_ is embedded; the rest are
  // heap-allocated and owned by the Heap.
  static NewSpace new_space_;
  static OldSpace* old_pointer_space_;
  static OldSpace* old_data_space_;
  static OldSpace* code_space_;
  static MapSpace* map_space_;
  static CellSpace* cell_space_;
  static LargeObjectSpace* lo_space_;
  static HeapState gc_state_;

  // Returns the size of objects residing in non new spaces.
  static int PromotedSpaceSize();

  // Returns the amount of external memory registered since last global gc.
  static int PromotedExternalMemorySize();

  static int mc_count_;  // how many mark-compact collections happened
  static int ms_count_;  // how many mark-sweep collections happened
  static int gc_count_;  // how many gc happened

  // Total length of the strings we failed to flatten since the last GC.
  static int unflattened_strings_length_;
1030
// Private setters for the heap roots, mirroring the public getters above.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline void set_##name(type* value) { \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#ifdef DEBUG
  static bool allocation_allowed_;

  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remain until the next failure and garbage collection.
  static int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  static bool disallow_allocation_failure_;
#endif  // DEBUG
1050
  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke.
  static int old_gen_promotion_limit_;

  // Limit that triggers a global GC as soon as is reasonable.  This is
  // checked before expanding a paged space in the old generation and on
  // every allocation in large object space.
  static int old_gen_allocation_limit_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs.  If reached a global GC is forced.
  static int external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles
  static int amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  static int amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  static int old_gen_exhausted_;

  // The root set; indexed by RootListIndex.
  static Object* roots_[kRootListLength];

  // Static tables driving CreateInitialMaps/CreateInitialObjects.
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];

  // The special hidden symbol which is an empty string, but does not match
  // any string when looked up in properties.
  static String* hidden_symbol_;
  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  // Pairs a callback with the GCType filter it registered for; equality is
  // on the callback only, so Remove* can find an entry regardless of filter.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
  static List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
  static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  // Single global callbacks, managed by SetGlobalGC*Callback above.
  static GCCallback global_gc_prologue_callback_;
  static GCCallback global_gc_epilogue_callback_;
1131
  // Checks whether a global GC is necessary
  static GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection
  static void PerformGarbageCollection(AllocationSpace space,
                                       GarbageCollector collector,
                                       GCTracer* tracer);

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  static inline Object* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  static inline Object* AllocateRawCell();

  // Initializes a JSObject based on its map.
  static void InitializeJSObjectFromMap(JSObject* obj,
                                        FixedArray* properties,
                                        Map* map);

  // Bootstrap: create the initial maps/objects making up the root set.
  static bool CreateInitialMaps();
  static bool CreateInitialObjects();

  // These four Create*EntryStub functions are here because of a gcc-4.4 bug
  // that assigns wrong vtable entries.
  static void CreateCEntryStub();
  static void CreateJSEntryStub();
  static void CreateJSConstructEntryStub();
  static void CreateRegExpCEntryStub();

  static void CreateFixedStubs();
1165
  // Creates an oddball value (e.g. undefined/null-style sentinel) with the
  // given string and number representations.
  static Object* CreateOddball(const char* to_string, Object* to_number);

  // Allocate empty fixed array.
  static Object* AllocateEmptyFixedArray();

  // Performs a minor collection in new generation.
  static void Scavenge();

  // Callback for UpdateNewSpaceReferencesInExternalStringTable.
  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Object** pointer);

  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                            Address new_space_front);

  // Performs a major collection in the whole heap.
  static void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  static void MarkCompactPrologue(bool is_compacting);
  static void MarkCompactEpilogue(bool is_compacting);

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  static void CompletelyClearInstanceofCache() {
    set_instanceof_cache_map(the_hole_value());
    set_instanceof_cache_function(the_hole_value());
  }

  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the source
  // object.  Returns the target object.
  static inline HeapObject* MigrateObject(HeapObject* source,
                                          HeapObject* target,
                                          int size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001200
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  // Record the copy of an object in the NewSpace's statistics.
  static void RecordCopiedObject(HeapObject* obj);

  // Record statistics before and after garbage collection.
  static void ReportStatisticsBeforeGC();
  static void ReportStatisticsAfterGC();
#endif

  // Rebuild remembered set in an old space.
  static void RebuildRSets(PagedSpace* space);

  // Rebuild remembered set in the large object space.
  static void RebuildRSets(LargeObjectSpace* space);

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Returns the function.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it. Specifically, a function that creates
  // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
  // Please note this does not perform a garbage collection.
  static inline Object* InitializeFunction(JSFunction* function,
                                           SharedFunctionInfo* shared,
                                           Object* prototype);

  // Tracer for the currently running collection (see tracer()).
  static GCTracer* tracer_;


  // Initializes the number to string cache based on the max semispace size.
  static Object* InitializeNumberStringCache();
  // Flush the number to string cache.
  static void FlushNumberStringCache();

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  friend class Factory;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
1244};
1245
1246
// Plain record of out-parameters filled in by Heap::RecordStats: each member
// points at a caller-provided int.  start_marker/end_marker presumably
// bracket the block so a consumer (e.g. a crash dump) can validate it --
// TODO confirm against RecordStats in heap.cc.
class HeapStats {
 public:
  int* start_marker;
  int* new_space_size;
  int* new_space_capacity;
  int* old_pointer_space_size;
  int* old_pointer_space_capacity;
  int* old_data_space_size;
  int* old_data_space_capacity;
  int* code_space_size;
  int* code_space_capacity;
  int* map_space_size;
  int* map_space_capacity;
  int* cell_space_size;
  int* cell_space_capacity;
  int* lo_space_size;
  int* global_handle_count;
  int* weak_global_handle_count;
  int* pending_global_handle_count;
  int* near_death_global_handle_count;
  int* destroyed_global_handle_count;
  int* end_marker;
};
1270
1271
1272class AlwaysAllocateScope {
1273 public:
1274 AlwaysAllocateScope() {
1275 // We shouldn't hit any nested scopes, because that requires
1276 // non-handle code to call handle code. The code still works but
1277 // performance will degrade, so we want to catch this situation
1278 // in debug mode.
1279 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1280 Heap::always_allocate_scope_depth_++;
1281 }
1282
1283 ~AlwaysAllocateScope() {
1284 Heap::always_allocate_scope_depth_--;
1285 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1286 }
1287};
1288
1289
Steve Blockd0582a62009-12-15 09:54:21 +00001290class LinearAllocationScope {
1291 public:
1292 LinearAllocationScope() {
1293 Heap::linear_allocation_scope_depth_++;
1294 }
1295
1296 ~LinearAllocationScope() {
1297 Heap::linear_allocation_scope_depth_--;
1298 ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
1299 }
1300};
1301
1302
Steve Blocka7e24c12009-10-30 11:49:00 +00001303#ifdef DEBUG
1304// Visitor class to verify interior pointers that do not have remembered set
1305// bits. All heap object pointers have to point into the heap to a location
1306// that has a map pointer at its first word. Caveat: Heap::Contains is an
1307// approximation because it can return true for objects in a heap space but
1308// above the allocation pointer.
1309class VerifyPointersVisitor: public ObjectVisitor {
1310 public:
1311 void VisitPointers(Object** start, Object** end) {
1312 for (Object** current = start; current < end; current++) {
1313 if ((*current)->IsHeapObject()) {
1314 HeapObject* object = HeapObject::cast(*current);
1315 ASSERT(Heap::Contains(object));
1316 ASSERT(object->map()->IsMap());
1317 }
1318 }
1319 }
1320};
1321
1322
1323// Visitor class to verify interior pointers that have remembered set bits.
1324// As VerifyPointersVisitor but also checks that remembered set bits are
1325// always set for pointers into new space.
1326class VerifyPointersAndRSetVisitor: public ObjectVisitor {
1327 public:
1328 void VisitPointers(Object** start, Object** end) {
1329 for (Object** current = start; current < end; current++) {
1330 if ((*current)->IsHeapObject()) {
1331 HeapObject* object = HeapObject::cast(*current);
1332 ASSERT(Heap::Contains(object));
1333 ASSERT(object->map()->IsMap());
1334 if (Heap::InNewSpace(object)) {
1335 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
1336 }
1337 }
1338 }
1339 }
1340};
1341#endif
1342
1343
1344// Space iterator for iterating over all spaces of the heap.
1345// Returns each space in turn, and null when it is done.
1346class AllSpaces BASE_EMBEDDED {
1347 public:
1348 Space* next();
1349 AllSpaces() { counter_ = FIRST_SPACE; }
1350 private:
1351 int counter_;
1352};
1353
1354
1355// Space iterator for iterating over all old spaces of the heap: Old pointer
1356// space, old data space and code space.
1357// Returns each space in turn, and null when it is done.
1358class OldSpaces BASE_EMBEDDED {
1359 public:
1360 OldSpace* next();
1361 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1362 private:
1363 int counter_;
1364};
1365
1366
1367// Space iterator for iterating over all the paged spaces of the heap:
Leon Clarkee46be812010-01-19 14:06:41 +00001368// Map space, old pointer space, old data space, code space and cell space.
Steve Blocka7e24c12009-10-30 11:49:00 +00001369// Returns each space in turn, and null when it is done.
1370class PagedSpaces BASE_EMBEDDED {
1371 public:
1372 PagedSpace* next();
1373 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1374 private:
1375 int counter_;
1376};
1377
1378
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided.  The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // Standard has_next/next protocol; next() yields an object iterator owned
  // by this SpaceIterator.
  bool has_next();
  ObjectIterator* next();

 private:
  // Creates the object iterator for current_space_.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1396
1397
// A HeapIterator provides iteration over the whole heap.  It aggregates the
// specific iterators for the different spaces, as each of those can only
// iterate over a single space.

class HeapIterator BASE_EMBEDDED {
 public:
  explicit HeapIterator();
  virtual ~HeapIterator();

  // Returns the next heap object, and supports restarting via reset().
  HeapObject* next();
  void reset();

 private:
  // Perform the initialization.
  void Init();

  // Perform all necessary shutdown (destruction) work.
  void Shutdown();

  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
1422
1423
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name).  If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // Cache geometry; kLength must stay a power of two for kCapacityMask.
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  // Parallel arrays: keys_[i] corresponds to field_offsets_[i].
  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference exposes keys_address/field_offsets_address to codegen.
  friend class ExternalReference;
};
Steve Blocka7e24c12009-10-30 11:49:00 +00001463
1464
1465// Cache for mapping (array, property name) into descriptor index.
1466// The cache contains both positive and negative results.
1467// Descriptor index equals kNotFound means the property is absent.
1468// Cleared at startup and prior to any gc.
1469class DescriptorLookupCache {
1470 public:
1471 // Lookup descriptor index for (map, name).
1472 // If absent, kAbsent is returned.
1473 static int Lookup(DescriptorArray* array, String* name) {
1474 if (!StringShape(name).IsSymbol()) return kAbsent;
1475 int index = Hash(array, name);
1476 Key& key = keys_[index];
1477 if ((key.array == array) && (key.name == name)) return results_[index];
1478 return kAbsent;
1479 }
1480
1481 // Update an element in the cache.
1482 static void Update(DescriptorArray* array, String* name, int result) {
1483 ASSERT(result != kAbsent);
1484 if (StringShape(name).IsSymbol()) {
1485 int index = Hash(array, name);
1486 Key& key = keys_[index];
1487 key.array = array;
1488 key.name = name;
1489 results_[index] = result;
1490 }
1491 }
1492
1493 // Clear the cache.
1494 static void Clear();
1495
1496 static const int kAbsent = -2;
1497 private:
1498 static int Hash(DescriptorArray* array, String* name) {
1499 // Uses only lower 32 bits if pointers are larger.
Andrei Popescu402d9372010-02-26 13:31:12 +00001500 uint32_t array_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001501 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
Andrei Popescu402d9372010-02-26 13:31:12 +00001502 uint32_t name_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001503 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1504 return (array_hash ^ name_hash) % kLength;
1505 }
1506
1507 static const int kLength = 64;
1508 struct Key {
1509 DescriptorArray* array;
1510 String* name;
1511 };
1512
1513 static Key keys_[kLength];
1514 static int results_[kLength];
1515};
1516
1517
1518// ----------------------------------------------------------------------------
1519// Marking stack for tracing live objects.
1520
1521class MarkingStack {
1522 public:
1523 void Initialize(Address low, Address high) {
1524 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1525 high_ = reinterpret_cast<HeapObject**>(high);
1526 overflowed_ = false;
1527 }
1528
1529 bool is_full() { return top_ >= high_; }
1530
1531 bool is_empty() { return top_ <= low_; }
1532
1533 bool overflowed() { return overflowed_; }
1534
1535 void clear_overflowed() { overflowed_ = false; }
1536
1537 // Push the (marked) object on the marking stack if there is room,
1538 // otherwise mark the object as overflowed and wait for a rescan of the
1539 // heap.
1540 void Push(HeapObject* object) {
1541 CHECK(object->IsHeapObject());
1542 if (is_full()) {
1543 object->SetOverflow();
1544 overflowed_ = true;
1545 } else {
1546 *(top_++) = object;
1547 }
1548 }
1549
1550 HeapObject* Pop() {
1551 ASSERT(!is_empty());
1552 HeapObject* object = *(--top_);
1553 CHECK(object->IsHeapObject());
1554 return object;
1555 }
1556
1557 private:
1558 HeapObject** low_;
1559 HeapObject** top_;
1560 HeapObject** high_;
1561 bool overflowed_;
1562};
1563
1564
1565// A helper class to document/test C++ scopes where we do not
1566// expect a GC. Usage:
1567//
1568// /* Allocation not allowed: we cannot handle a GC in this scope. */
1569// { AssertNoAllocation nogc;
1570// ...
1571// }
1572
1573#ifdef DEBUG
1574
1575class DisallowAllocationFailure {
1576 public:
1577 DisallowAllocationFailure() {
1578 old_state_ = Heap::disallow_allocation_failure_;
1579 Heap::disallow_allocation_failure_ = true;
1580 }
1581 ~DisallowAllocationFailure() {
1582 Heap::disallow_allocation_failure_ = old_state_;
1583 }
1584 private:
1585 bool old_state_;
1586};
1587
1588class AssertNoAllocation {
1589 public:
1590 AssertNoAllocation() {
1591 old_state_ = Heap::allow_allocation(false);
1592 }
1593
1594 ~AssertNoAllocation() {
1595 Heap::allow_allocation(old_state_);
1596 }
1597
1598 private:
1599 bool old_state_;
1600};
1601
1602class DisableAssertNoAllocation {
1603 public:
1604 DisableAssertNoAllocation() {
1605 old_state_ = Heap::allow_allocation(true);
1606 }
1607
1608 ~DisableAssertNoAllocation() {
1609 Heap::allow_allocation(old_state_);
1610 }
1611
1612 private:
1613 bool old_state_;
1614};
1615
1616#else // ndef DEBUG
1617
// Release-build stand-in: the allocation check compiles away to nothing.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};
1623
// Release-build stand-in: the allocation check compiles away to nothing.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1629
1630#endif
1631
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  // RAII timer: accumulates the wall-clock time spent in its scope into
  // the tracer's per-phase bucket identified by ScopeId.
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_COMPACT,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT((0 <= scope_) && (scope_ < kNumberOfScopes));
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;   // Tracer that owns the scopes_ array.
    ScopeId scope_;      // Which bucket of scopes_ to add to.
    double start_time_;  // Timestamp when this scope was entered.
  };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  // Adds object_size to the running total of bytes promoted during this
  // collection.
  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  // Returns maximum GC pause.
  static int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  static int get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  static int get_min_in_mutator() { return min_in_mutator_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of objects in the heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  int start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, eg, the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of free lists
  // before the current GC.
  int in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  int allocated_since_last_gc_;

  // Amount of time spent in mutator that is time elapsed between end of the
  // previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  int promoted_objects_size_;

  // Maximum GC pause.
  static int max_gc_pause_;

  // Maximum size of objects alive after GC.
  static int max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  static int min_in_mutator_;

  // Size of objects alive after last GC.
  static int alive_after_last_gc_;

  // Timestamp of the end of the last GC.
  static double last_gc_end_timestamp_;
};
1768
1769
1770class TranscendentalCache {
1771 public:
1772 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
1773
1774 explicit TranscendentalCache(Type t);
1775
1776 // Returns a heap number with f(input), where f is a math function specified
1777 // by the 'type' argument.
1778 static inline Object* Get(Type type, double input) {
1779 TranscendentalCache* cache = caches_[type];
1780 if (cache == NULL) {
1781 caches_[type] = cache = new TranscendentalCache(type);
1782 }
1783 return cache->Get(input);
1784 }
1785
1786 // The cache contains raw Object pointers. This method disposes of
1787 // them before a garbage collection.
1788 static void Clear();
1789
1790 private:
1791 inline Object* Get(double input) {
1792 Converter c;
1793 c.dbl = input;
1794 int hash = Hash(c);
1795 Element e = elements_[hash];
1796 if (e.in[0] == c.integers[0] &&
1797 e.in[1] == c.integers[1]) {
1798 ASSERT(e.output != NULL);
Andrei Popescu402d9372010-02-26 13:31:12 +00001799 Counters::transcendental_cache_hit.Increment();
Steve Blocka7e24c12009-10-30 11:49:00 +00001800 return e.output;
1801 }
1802 double answer = Calculate(input);
1803 Object* heap_number = Heap::AllocateHeapNumber(answer);
1804 if (!heap_number->IsFailure()) {
1805 elements_[hash].in[0] = c.integers[0];
1806 elements_[hash].in[1] = c.integers[1];
1807 elements_[hash].output = heap_number;
1808 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001809 Counters::transcendental_cache_miss.Increment();
Steve Blocka7e24c12009-10-30 11:49:00 +00001810 return heap_number;
1811 }
1812
1813 inline double Calculate(double input) {
1814 switch (type_) {
1815 case ACOS:
1816 return acos(input);
1817 case ASIN:
1818 return asin(input);
1819 case ATAN:
1820 return atan(input);
1821 case COS:
1822 return cos(input);
1823 case EXP:
1824 return exp(input);
1825 case LOG:
1826 return log(input);
1827 case SIN:
1828 return sin(input);
1829 case TAN:
1830 return tan(input);
1831 default:
1832 return 0.0; // Never happens.
1833 }
1834 }
1835 static const int kCacheSize = 512;
1836 struct Element {
1837 uint32_t in[2];
1838 Object* output;
1839 };
1840 union Converter {
1841 double dbl;
1842 uint32_t integers[2];
1843 };
1844 inline static int Hash(const Converter& c) {
1845 uint32_t hash = (c.integers[0] ^ c.integers[1]);
1846 hash ^= hash >> 16;
1847 hash ^= hash >> 8;
1848 return (hash & (kCacheSize - 1));
1849 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001850
1851 static Address cache_array_address() {
1852 // Used to create an external reference.
1853 return reinterpret_cast<Address>(caches_);
1854 }
1855
1856 // Allow access to the caches_ array as an ExternalReference.
1857 friend class ExternalReference;
1858 // Inline implementation of the caching.
1859 friend class TranscendentalCacheStub;
1860
Steve Blocka7e24c12009-10-30 11:49:00 +00001861 static TranscendentalCache* caches_[kNumberOfCaches];
1862 Element elements_[kCacheSize];
1863 Type type_;
1864};
1865
1866
// External strings table is a place where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  // Visits every registered string with the given visitor.
  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  // Checks internal invariants (debug-only; defined in heap-inl.h/.cc).
  inline static void Verify();

  // Registers a string known to live in old space.
  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections, new space strings are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};
1899
Steve Blocka7e24c12009-10-30 11:49:00 +00001900} } // namespace v8::internal
1901
1902#endif // V8_HEAP_H_