// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
Steve Block6ded16b2010-05-10 14:33:55 +010033#include "splay-tree-inl.h"
34#include "v8-counters.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
Steve Block6ded16b2010-05-10 14:33:55 +010039// Forward declarations.
40class ZoneScopeInfo;
41
// Defines all the roots in Heap.  This is an X-macro: V is expanded once per
// root as V(type, lower_case_accessor_name, CamelCaseName).
// NOTE(review): entry order is significant -- the deserializer comment below
// and the cache-line clustering of popular roots both rely on it; do not
// reorder entries.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V)                                      \
  /* Put the byte array map early.  We need it to be in place by the time */   \
  /* the deserializer hits the next page, since it wants to put a byte */      \
  /* array in the unused space at the end of the page. */                      \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top. */    \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Object, undefined_value, UndefinedValue)                                   \
  V(Object, the_hole_value, TheHoleValue)                                      \
  V(Object, null_value, NullValue)                                             \
  V(Object, true_value, TrueValue)                                             \
  V(Object, false_value, FalseValue)                                           \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, meta_map, MetaMap)                                                    \
  V(Object, termination_exception, TerminationException)                       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, pixel_array_map, PixelArrayMap)                                       \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, context_map, ContextMap)                                              \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, proxy_map, ProxyMap)                                                  \
  V(Object, nan_value, NanValue)                                               \
  V(Object, minus_zero_value, MinusZeroValue)                                  \
  /* Single-entry cache for the most recent instanceof result. */              \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Proxy, prototype_accessors, PrototypeAccessors)                            \
  V(NumberDictionary, code_stubs, CodeStubs)                                   \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(Code, c_entry_code, CEntryCode)                                            \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \

// On ARM with the native (non-interpreted) regexp engine there is one extra
// strong root: the RegExp C entry stub.  On all other configurations the
// strong root list is exactly the unconditional list.
#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#define STRONG_ROOT_LIST(V)                                                    \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
123
// The complete root list: all strong roots plus the symbol table.
// NOTE(review): the symbol table is deliberately kept out of
// STRONG_ROOT_LIST -- presumably so the GC can treat its entries
// specially (weakly); confirm against the visitors in heap.cc.
#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(SymbolTable, symbol_table, SymbolTable)
127
// Symbols (interned strings) preallocated in the heap and reachable by name.
// This is an X-macro: V is expanded once per symbol as
// V(lower_case_accessor_name, "literal contents").
#define SYMBOL_LIST(V)                                                   \
  V(Array_symbol, "Array")                                               \
  V(Object_symbol, "Object")                                             \
  V(Proto_symbol, "__proto__")                                           \
  V(StringImpl_symbol, "StringImpl")                                     \
  V(arguments_symbol, "arguments")                                       \
  V(Arguments_symbol, "Arguments")                                       \
  V(arguments_shadow_symbol, ".arguments")                               \
  V(call_symbol, "call")                                                 \
  V(apply_symbol, "apply")                                               \
  V(caller_symbol, "caller")                                             \
  V(boolean_symbol, "boolean")                                           \
  V(Boolean_symbol, "Boolean")                                           \
  V(callee_symbol, "callee")                                             \
  V(constructor_symbol, "constructor")                                   \
  V(code_symbol, ".code")                                                \
  V(result_symbol, ".result")                                            \
  V(catch_var_symbol, ".catch-var")                                      \
  V(empty_symbol, "")                                                    \
  V(eval_symbol, "eval")                                                 \
  V(function_symbol, "function")                                         \
  V(length_symbol, "length")                                             \
  V(name_symbol, "name")                                                 \
  V(number_symbol, "number")                                             \
  V(Number_symbol, "Number")                                             \
  V(RegExp_symbol, "RegExp")                                             \
  V(source_symbol, "source")                                             \
  V(global_symbol, "global")                                             \
  V(ignore_case_symbol, "ignoreCase")                                    \
  V(multiline_symbol, "multiline")                                       \
  V(input_symbol, "input")                                               \
  V(index_symbol, "index")                                               \
  V(last_index_symbol, "lastIndex")                                      \
  V(object_symbol, "object")                                             \
  V(prototype_symbol, "prototype")                                       \
  V(string_symbol, "string")                                             \
  V(String_symbol, "String")                                             \
  V(Date_symbol, "Date")                                                 \
  V(this_symbol, "this")                                                 \
  V(to_string_symbol, "toString")                                        \
  V(char_at_symbol, "CharAt")                                            \
  V(undefined_symbol, "undefined")                                       \
  V(value_of_symbol, "valueOf")                                          \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")               \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                  \
  V(illegal_access_symbol, "illegal access")                             \
  V(out_of_memory_symbol, "out-of-memory")                               \
  V(illegal_execution_state_symbol, "illegal execution state")           \
  V(get_symbol, "get")                                                   \
  V(set_symbol, "set")                                                   \
  V(function_class_symbol, "Function")                                   \
  V(illegal_argument_symbol, "illegal argument")                         \
  V(MakeReferenceError_symbol, "MakeReferenceError")                     \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                           \
  V(MakeTypeError_symbol, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_symbol, "illegal_return")                             \
  V(illegal_break_symbol, "illegal_break")                               \
  V(illegal_continue_symbol, "illegal_continue")                         \
  V(unknown_label_symbol, "unknown_label")                               \
  V(redeclaration_symbol, "redeclaration")                               \
  V(failure_symbol, "<failure>")                                         \
  V(space_symbol, " ")                                                   \
  V(exec_symbol, "exec")                                                 \
  V(zero_symbol, "0")                                                    \
  V(global_eval_symbol, "GlobalEval")                                    \
  V(identity_hash_symbol, "v8::IdentityHash")                            \
  V(closure_symbol, "(closure)")
Steve Blocka7e24c12009-10-30 11:49:00 +0000200
201
// Forward declarations of the GC tracing/statistics helpers (defined
// elsewhere; only pointers/references are used in this header).
class GCTracer;
class HeapStats;
Steve Blocka7e24c12009-10-30 11:49:00 +0000205
206
// Callback invoked with the address of an external string table slot; it
// returns the String the slot should now refer to.
// NOTE(review): exact contract (NULL handling, when it runs) is not visible
// in this header -- confirm at the call sites in heap.cc.
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);

// Callback invoked for a dirty memory region [start, end); copy_object_func
// is applied to the object slots found there.  NOTE(review): presumably part
// of the write-barrier/scavenge machinery -- confirm in heap.cc.
typedef bool (*DirtyRegionCallback)(Address start,
                                    Address end,
                                    ObjectSlotCallback copy_object_func);
212
Steve Block6ded16b2010-05-10 14:33:55 +0100213
Steve Blocka7e24c12009-10-30 11:49:00 +0000214// The all static Heap captures the interface to the global object heap.
215// All JavaScript contexts by this process share the same object heap.
216
217class Heap : public AllStatic {
218 public:
219 // Configure heap size before setup. Return false if the heap has been
220 // setup already.
Steve Block3ce2e202009-11-05 08:53:23 +0000221 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000222 static bool ConfigureHeapDefault();
223
224 // Initializes the global object heap. If create_heap_objects is true,
225 // also creates the basic non-mutable objects.
226 // Returns whether it succeeded.
227 static bool Setup(bool create_heap_objects);
228
229 // Destroys all memory allocated by the heap.
230 static void TearDown();
231
Steve Blockd0582a62009-12-15 09:54:21 +0000232 // Set the stack limit in the roots_ array. Some architectures generate
233 // code that looks here, because it is faster than loading from the static
234 // jslimit_/real_jslimit_ variable in the StackGuard.
235 static void SetStackLimits();
Steve Blocka7e24c12009-10-30 11:49:00 +0000236
237 // Returns whether Setup has been called.
238 static bool HasBeenSetup();
239
Steve Block3ce2e202009-11-05 08:53:23 +0000240 // Returns the maximum amount of memory reserved for the heap. For
241 // the young generation, we reserve 4 times the amount needed for a
242 // semi space. The young generation consists of two semi spaces and
243 // we reserve twice the amount needed for those in order to ensure
244 // that new space can be aligned to its size.
  // 4x the reserved semispace (see the comment above: two semispaces, doubled
  // so new space can be size-aligned) plus the old-generation maximum.
  static int MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  // Accessors for the heap size configuration; presumably established by
  // ConfigureHeap()/ConfigureHeapDefault() -- confirm in heap.cc.
  static int MaxSemiSpaceSize() { return max_semispace_size_; }
  static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  static int InitialSemiSpaceSize() { return initial_semispace_size_; }
  static int MaxOldGenerationSize() { return max_old_generation_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000252
253 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
254 // more spaces are needed until it reaches the limit.
255 static int Capacity();
256
Steve Block3ce2e202009-11-05 08:53:23 +0000257 // Returns the amount of memory currently committed for the heap.
258 static int CommittedMemory();
259
Steve Blocka7e24c12009-10-30 11:49:00 +0000260 // Returns the available bytes in space w/o growing.
261 // Heap doesn't guarantee that it can allocate an object that requires
262 // all available bytes. Check MaxHeapObjectSize() instead.
263 static int Available();
264
265 // Returns the maximum object size in paged space.
266 static inline int MaxObjectSizeInPagedSpace();
267
268 // Returns of size of all objects residing in the heap.
269 static int SizeOfObjects();
270
271 // Return the starting address and a mask for the new space. And-masking an
272 // address with the mask will result in the start address of the new space
273 // for all addresses in either semispace.
  // See the comment above: start address, and-mask and current allocation
  // top of the new space (the mask maps any new-space address to the start).
  static Address NewSpaceStart() { return new_space_.start(); }
  static uintptr_t NewSpaceMask() { return new_space_.mask(); }
  static Address NewSpaceTop() { return new_space_.top(); }

  // Direct accessors for each of the heap's spaces.
  static NewSpace* new_space() { return &new_space_; }
  static OldSpace* old_pointer_space() { return old_pointer_space_; }
  static OldSpace* old_data_space() { return old_data_space_; }
  static OldSpace* code_space() { return code_space_; }
  static MapSpace* map_space() { return map_space_; }
  static CellSpace* cell_space() { return cell_space_; }
  static LargeObjectSpace* lo_space() { return lo_space_; }
285
  // True while at least one always-allocate scope is active (non-zero depth
  // counter).  NOTE(review): scope type inferred from the counter's name --
  // confirm where the counter is incremented.
  static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  // Raw address of the always-allocate depth counter itself.
  static Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  // True while at least one linear-allocation scope is active.
  static bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }
Steve Blocka7e24c12009-10-30 11:49:00 +0000293
  // Addresses of the new-space allocation top/limit fields, exposed so that
  // callers can load them directly -- NOTE(review): presumably used by
  // generated inline-allocation code; confirm at the call sites.
  static Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  static Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  // Uncommit unused semi space.  Returns whether the uncommit succeeded.
  static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
303
304#ifdef ENABLE_HEAP_PROTECTION
305 // Protect/unprotect the heap by marking all spaces read-only/writable.
306 static void Protect();
307 static void Unprotect();
308#endif
309
310 // Allocates and initializes a new JavaScript object based on a
311 // constructor.
312 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
313 // failed.
314 // Please note this does not perform a garbage collection.
315 static Object* AllocateJSObject(JSFunction* constructor,
316 PretenureFlag pretenure = NOT_TENURED);
317
318 // Allocates and initializes a new global object based on a constructor.
319 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
320 // failed.
321 // Please note this does not perform a garbage collection.
322 static Object* AllocateGlobalObject(JSFunction* constructor);
323
324 // Returns a deep copy of the JavaScript object.
325 // Properties and elements are copied too.
326 // Returns failure if allocation failed.
327 static Object* CopyJSObject(JSObject* source);
328
329 // Allocates the function prototype.
330 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
331 // failed.
332 // Please note this does not perform a garbage collection.
333 static Object* AllocateFunctionPrototype(JSFunction* function);
334
335 // Reinitialize an JSGlobalProxy based on a constructor. The object
336 // must have the same size as objects allocated using the
337 // constructor. The object is reinitialized and behaves as an
338 // object that has been freshly allocated using the constructor.
339 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
340 JSGlobalProxy* global);
341
342 // Allocates and initializes a new JavaScript object based on a map.
343 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
344 // failed.
345 // Please note this does not perform a garbage collection.
346 static Object* AllocateJSObjectFromMap(Map* map,
347 PretenureFlag pretenure = NOT_TENURED);
348
349 // Allocates a heap object based on the map.
350 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
351 // failed.
352 // Please note this function does not perform a garbage collection.
353 static Object* Allocate(Map* map, AllocationSpace space);
354
355 // Allocates a JS Map in the heap.
356 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
357 // failed.
358 // Please note this function does not perform a garbage collection.
359 static Object* AllocateMap(InstanceType instance_type, int instance_size);
360
361 // Allocates a partial map for bootstrapping.
362 static Object* AllocatePartialMap(InstanceType instance_type,
363 int instance_size);
364
365 // Allocate a map for the specified function
366 static Object* AllocateInitialMap(JSFunction* fun);
367
Steve Block6ded16b2010-05-10 14:33:55 +0100368 // Allocates an empty code cache.
369 static Object* AllocateCodeCache();
370
  // Clear the Instanceof cache (used when a prototype changes).
  // NOTE(review): only the function root is overwritten (with the hole);
  // presumably cache probes compare the function first, making the stale
  // map/answer roots unreachable -- confirm in the instanceof stub.
  static void ClearInstanceofCache() {
    set_instanceof_cache_function(the_hole_value());
  }
375
Steve Blocka7e24c12009-10-30 11:49:00 +0000376 // Allocates and fully initializes a String. There are two String
377 // encodings: ASCII and two byte. One should choose between the three string
378 // allocation functions based on the encoding of the string buffer used to
379 // initialized the string.
380 // - ...FromAscii initializes the string from a buffer that is ASCII
381 // encoded (it does not check that the buffer is ASCII encoded) and the
382 // result will be ASCII encoded.
383 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
384 // encoded. If the characters are all single-byte characters, the
385 // result will be ASCII encoded, otherwise it will converted to two
386 // byte.
387 // - ...FromTwoByte initializes the string from a buffer that is two-byte
388 // encoded. If the characters are all single-byte characters, the
389 // result will be converted to ASCII, otherwise it will be left as
390 // two-byte.
391 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
392 // failed.
393 // Please note this does not perform a garbage collection.
394 static Object* AllocateStringFromAscii(
395 Vector<const char> str,
396 PretenureFlag pretenure = NOT_TENURED);
397 static Object* AllocateStringFromUtf8(
398 Vector<const char> str,
399 PretenureFlag pretenure = NOT_TENURED);
400 static Object* AllocateStringFromTwoByte(
401 Vector<const uc16> str,
402 PretenureFlag pretenure = NOT_TENURED);
403
404 // Allocates a symbol in old space based on the character stream.
405 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
406 // failed.
407 // Please note this function does not perform a garbage collection.
408 static inline Object* AllocateSymbol(Vector<const char> str,
409 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000410 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000411
412 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
413 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000414 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000415
416 static Object* AllocateExternalSymbol(Vector<const char> str,
417 int chars);
418
419
420 // Allocates and partially initializes a String. There are two String
421 // encodings: ASCII and two byte. These functions allocate a string of the
422 // given length and set its map and length fields. The characters of the
423 // string are uninitialized.
424 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
425 // failed.
426 // Please note this does not perform a garbage collection.
427 static Object* AllocateRawAsciiString(
428 int length,
429 PretenureFlag pretenure = NOT_TENURED);
430 static Object* AllocateRawTwoByteString(
431 int length,
432 PretenureFlag pretenure = NOT_TENURED);
433
434 // Computes a single character string where the character has code.
435 // A cache is used for ascii codes.
436 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
437 // failed. Please note this does not perform a garbage collection.
438 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
439
440 // Allocate a byte array of the specified length
441 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
442 // failed.
443 // Please note this does not perform a garbage collection.
444 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
445
446 // Allocate a non-tenured byte array of the specified length
447 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
448 // failed.
449 // Please note this does not perform a garbage collection.
450 static Object* AllocateByteArray(int length);
451
452 // Allocate a pixel array of the specified length
453 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
454 // failed.
455 // Please note this does not perform a garbage collection.
456 static Object* AllocatePixelArray(int length,
457 uint8_t* external_pointer,
458 PretenureFlag pretenure);
459
Steve Block3ce2e202009-11-05 08:53:23 +0000460 // Allocates an external array of the specified length and type.
461 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
462 // failed.
463 // Please note this does not perform a garbage collection.
464 static Object* AllocateExternalArray(int length,
465 ExternalArrayType array_type,
466 void* external_pointer,
467 PretenureFlag pretenure);
468
Steve Blocka7e24c12009-10-30 11:49:00 +0000469 // Allocate a tenured JS global property cell.
470 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
471 // failed.
472 // Please note this does not perform a garbage collection.
473 static Object* AllocateJSGlobalPropertyCell(Object* value);
474
475 // Allocates a fixed array initialized with undefined values
476 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
477 // failed.
478 // Please note this does not perform a garbage collection.
479 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
Steve Block6ded16b2010-05-10 14:33:55 +0100480 // Allocates a fixed array initialized with undefined values
Steve Blocka7e24c12009-10-30 11:49:00 +0000481 static Object* AllocateFixedArray(int length);
482
Steve Block6ded16b2010-05-10 14:33:55 +0100483 // Allocates an uninitialized fixed array. It must be filled by the caller.
484 //
485 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
486 // failed.
487 // Please note this does not perform a garbage collection.
488 static Object* AllocateUninitializedFixedArray(int length);
489
Steve Blocka7e24c12009-10-30 11:49:00 +0000490 // Make a copy of src and return it. Returns
491 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
492 static Object* CopyFixedArray(FixedArray* src);
493
494 // Allocates a fixed array initialized with the hole values.
495 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
496 // failed.
497 // Please note this does not perform a garbage collection.
Steve Block6ded16b2010-05-10 14:33:55 +0100498 static Object* AllocateFixedArrayWithHoles(
499 int length,
500 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000501
502 // AllocateHashTable is identical to AllocateFixedArray except
503 // that the resulting object has hash_table_map as map.
Steve Block6ded16b2010-05-10 14:33:55 +0100504 static Object* AllocateHashTable(int length,
505 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000506
507 // Allocate a global (but otherwise uninitialized) context.
508 static Object* AllocateGlobalContext();
509
510 // Allocate a function context.
511 static Object* AllocateFunctionContext(int length, JSFunction* closure);
512
513 // Allocate a 'with' context.
514 static Object* AllocateWithContext(Context* previous,
515 JSObject* extension,
516 bool is_catch_context);
517
518 // Allocates a new utility object in the old generation.
519 static Object* AllocateStruct(InstanceType type);
520
521 // Allocates a function initialized with a shared part.
522 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
523 // failed.
524 // Please note this does not perform a garbage collection.
525 static Object* AllocateFunction(Map* function_map,
526 SharedFunctionInfo* shared,
Leon Clarkee46be812010-01-19 14:06:41 +0000527 Object* prototype,
528 PretenureFlag pretenure = TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000529
530 // Indicies for direct access into argument objects.
Leon Clarkee46be812010-01-19 14:06:41 +0000531 static const int kArgumentsObjectSize =
532 JSObject::kHeaderSize + 2 * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000533 static const int arguments_callee_index = 0;
534 static const int arguments_length_index = 1;
535
536 // Allocates an arguments object - optionally with an elements array.
537 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
538 // failed.
539 // Please note this does not perform a garbage collection.
540 static Object* AllocateArgumentsObject(Object* callee, int length);
541
Steve Blocka7e24c12009-10-30 11:49:00 +0000542 // Same as NewNumberFromDouble, but may return a preallocated/immutable
543 // number object (e.g., minus_zero_value_, nan_value_)
544 static Object* NumberFromDouble(double value,
545 PretenureFlag pretenure = NOT_TENURED);
546
547 // Allocated a HeapNumber from value.
548 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
549 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
550
551 // Converts an int into either a Smi or a HeapNumber object.
552 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
553 // failed.
554 // Please note this does not perform a garbage collection.
555 static inline Object* NumberFromInt32(int32_t value);
556
557 // Converts an int into either a Smi or a HeapNumber object.
558 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
559 // failed.
560 // Please note this does not perform a garbage collection.
561 static inline Object* NumberFromUint32(uint32_t value);
562
563 // Allocates a new proxy object.
564 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
565 // failed.
566 // Please note this does not perform a garbage collection.
567 static Object* AllocateProxy(Address proxy,
568 PretenureFlag pretenure = NOT_TENURED);
569
570 // Allocates a new SharedFunctionInfo object.
571 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
572 // failed.
573 // Please note this does not perform a garbage collection.
574 static Object* AllocateSharedFunctionInfo(Object* name);
575
576 // Allocates a new cons string object.
577 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
578 // failed.
579 // Please note this does not perform a garbage collection.
580 static Object* AllocateConsString(String* first, String* second);
581
Steve Blocka7e24c12009-10-30 11:49:00 +0000582 // Allocates a new sub string object which is a substring of an underlying
583 // string buffer stretching from the index start (inclusive) to the index
584 // end (exclusive).
585 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
586 // failed.
587 // Please note this does not perform a garbage collection.
588 static Object* AllocateSubString(String* buffer,
589 int start,
Steve Block6ded16b2010-05-10 14:33:55 +0100590 int end,
591 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000592
593 // Allocate a new external string object, which is backed by a string
594 // resource that resides outside the V8 heap.
595 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
596 // failed.
597 // Please note this does not perform a garbage collection.
598 static Object* AllocateExternalStringFromAscii(
599 ExternalAsciiString::Resource* resource);
600 static Object* AllocateExternalStringFromTwoByte(
601 ExternalTwoByteString::Resource* resource);
602
Leon Clarkee46be812010-01-19 14:06:41 +0000603 // Finalizes an external string by deleting the associated external
604 // data and clearing the resource pointer.
605 static inline void FinalizeExternalString(String* string);
606
Steve Blocka7e24c12009-10-30 11:49:00 +0000607 // Allocates an uninitialized object. The memory is non-executable if the
608 // hardware and OS allow.
609 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
610 // failed.
611 // Please note this function does not perform a garbage collection.
612 static inline Object* AllocateRaw(int size_in_bytes,
613 AllocationSpace space,
614 AllocationSpace retry_space);
615
616 // Initialize a filler object to keep the ability to iterate over the heap
617 // when shortening objects.
618 static void CreateFillerObjectAt(Address addr, int size);
619
620 // Makes a new native code object
621 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
622 // failed. On success, the pointer to the Code object is stored in the
623 // self_reference. This allows generated code to reference its own Code
624 // object by containing this pointer.
625 // Please note this function does not perform a garbage collection.
626 static Object* CreateCode(const CodeDesc& desc,
627 ZoneScopeInfo* sinfo,
628 Code::Flags flags,
629 Handle<Object> self_reference);
630
631 static Object* CopyCode(Code* code);
Steve Block6ded16b2010-05-10 14:33:55 +0100632
633 // Copy the code and scope info part of the code object, but insert
634 // the provided data as the relocation information.
635 static Object* CopyCode(Code* code, Vector<byte> reloc_info);
636
Steve Blocka7e24c12009-10-30 11:49:00 +0000637 // Finds the symbol for string in the symbol table.
638 // If not found, a new symbol is added to the table and returned.
639 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
640 // failed.
641 // Please note this function does not perform a garbage collection.
642 static Object* LookupSymbol(Vector<const char> str);
  // Convenience wrapper over LookupSymbol for a NUL-terminated C string.
  static Object* LookupAsciiSymbol(const char* str) {
    return LookupSymbol(CStrVector(str));
  }
646 static Object* LookupSymbol(String* str);
647 static bool LookupSymbolIfExists(String* str, String** symbol);
Steve Blockd0582a62009-12-15 09:54:21 +0000648 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
Steve Blocka7e24c12009-10-30 11:49:00 +0000649
650 // Compute the matching symbol map for a string if possible.
651 // NULL is returned if string is in new space or not flattened.
652 static Map* SymbolMapForString(String* str);
653
Steve Block6ded16b2010-05-10 14:33:55 +0100654 // Tries to flatten a string before compare operation.
655 //
656 // Returns a failure in case it was decided that flattening was
657 // necessary and failed. Note, if flattening is not necessary the
658 // string might stay non-flat even when not a failure is returned.
659 //
660 // Please note this function does not perform a garbage collection.
661 static inline Object* PrepareForCompare(String* str);
662
Steve Blocka7e24c12009-10-30 11:49:00 +0000663 // Converts the given boolean condition to JavaScript boolean value.
664 static Object* ToBoolean(bool condition) {
665 return condition ? true_value() : false_value();
666 }
667
  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  static void GarbageCollectionPrologue();
  static void GarbageCollectionEpilogue();

  // Performs garbage collection operation.
  // Returns whether required_space bytes are available after the collection.
  static bool CollectGarbage(int required_space, AllocationSpace space);

  // Performs a full garbage collection.  Force compaction if the
  // parameter is true.
  static void CollectAllGarbage(bool force_compaction);
680
Steve Blocka7e24c12009-10-30 11:49:00 +0000681 // Notify the heap that a context has been disposed.
Steve Block6ded16b2010-05-10 14:33:55 +0100682 static int NotifyContextDisposed() { return ++contexts_disposed_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000683
  // Utility to invoke the scavenger.  This is needed in test code to
  // ensure correct callback for weak global handles.
  static void PerformScavenge();

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  static bool GarbageCollectionGreedyCheck();
#endif
692
Steve Block6ded16b2010-05-10 14:33:55 +0100693 static void AddGCPrologueCallback(
694 GCEpilogueCallback callback, GCType gc_type_filter);
695 static void RemoveGCPrologueCallback(GCEpilogueCallback callback);
696
697 static void AddGCEpilogueCallback(
698 GCEpilogueCallback callback, GCType gc_type_filter);
699 static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
700
Steve Blocka7e24c12009-10-30 11:49:00 +0000701 static void SetGlobalGCPrologueCallback(GCCallback callback) {
Steve Block6ded16b2010-05-10 14:33:55 +0100702 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
Steve Blocka7e24c12009-10-30 11:49:00 +0000703 global_gc_prologue_callback_ = callback;
704 }
705 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
Steve Block6ded16b2010-05-10 14:33:55 +0100706 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
Steve Blocka7e24c12009-10-30 11:49:00 +0000707 global_gc_epilogue_callback_ = callback;
708 }
709
  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  static inline type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

// Utility type maps: one checked getter per entry in STRUCT_LIST.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  static inline Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

// One checked getter per symbol in SYMBOL_LIST.
#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  // The hidden_symbol is special because it is the empty string, but does
  // not match the empty string.
  static String* hidden_symbol() { return hidden_symbol_; }

  // Iterates over all roots in the heap.
  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other (non-strong) roots in the heap.
  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
Steve Blocka7e24c12009-10-30 11:49:00 +0000746
  // Expected validity of page allocation watermarks when iterating dirty
  // regions (see IterateDirtyRegions below).
  enum ExpectedPageWatermarkState {
    WATERMARK_SHOULD_BE_VALID,
    WATERMARK_CAN_BE_INVALID
  };

  // For each dirty region on a page in use from an old space call
  // visit_dirty_region callback.
  // If either visit_dirty_region or callback can cause an allocation
  // in old space and changes in allocation watermark then
  // can_preallocate_during_iteration should be set to true.
  // All pages will be marked as having invalid watermark upon
  // iteration completion.
  static void IterateDirtyRegions(
      PagedSpace* space,
      DirtyRegionCallback visit_dirty_region,
      ObjectSlotCallback callback,
      ExpectedPageWatermarkState expected_page_watermark_state);

  // Interpret marks as a bitvector of dirty marks for regions of size
  // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
  // memory interval from start to top. For each dirty region call a
  // visit_dirty_region callback. Return updated bitvector of dirty marks.
  static uint32_t IterateDirtyRegions(uint32_t marks,
                                      Address start,
                                      Address end,
                                      DirtyRegionCallback visit_dirty_region,
                                      ObjectSlotCallback callback);

  // Iterate pointers to new space found in memory interval from start to end.
  // Update dirty marks for page containing start address.
  static void IterateAndMarkPointersToNewSpace(Address start,
                                               Address end,
                                               ObjectSlotCallback callback);

  // Iterate pointers to new space found in memory interval from start to end.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyRegion(Address start,
                                           Address end,
                                           ObjectSlotCallback callback);


  // Iterate pointers to new space found in memory interval from start to end.
  // This interval is considered to belong to the map space.
  // Return true if pointers to new space were found.
  static bool IteratePointersInDirtyMapsRegion(Address start,
                                               Address end,
                                               ObjectSlotCallback callback);
794
Steve Blocka7e24c12009-10-30 11:49:00 +0000795
  // Returns whether the object resides in new space.
  static inline bool InNewSpace(Object* object);
  static inline bool InFromSpace(Object* object);
  static inline bool InToSpace(Object* object);

  // Checks whether an address/object is in the heap (including auxiliary
  // area and unused area).
  static bool Contains(Address addr);
  static bool Contains(HeapObject* value);

  // Checks whether an address/object is in a particular space.
  // Currently used by tests, serialization and heap verification only.
  static bool InSpace(Address addr, AllocationSpace space);
  static bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  static inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  static void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  // Sets the empty-script root entry.
  static void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  // Update the next script id.
  static inline void SetLastScriptId(Object* last_script_id);

  // Generated code can embed this address to get access to the roots.
  static Object** roots_address() { return roots_; }
834
#ifdef DEBUG
  // Dump the heap / the handle list to stdout (debug builds only).
  static void Print();
  static void PrintHandles();

  // Verify the heap is in its normal state before or after a GC.
  static void Verify();

  // Report heap statistics.
  static void ReportHeapStatistics(const char* title);
  static void ReportCodeStatistics(const char* title);

  // Fill in bogus values in from space.
  static void ZapFromSpace();
#endif

#if defined(ENABLE_LOGGING_AND_PROFILING)
  // Print short heap statistics.
  static void PrintShortHeapStatistics();
#endif
854
  // Makes a new symbol object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  static Object* CreateSymbol(const char* str, int length, int hash);
  static Object* CreateSymbol(String* str);

  // Write barrier support for address[offset] = o.
  static inline void RecordWrite(Address address, int offset);

  // Write barrier support for address[start : start + len[ = o.
  static inline void RecordWrites(Address address, int start, int len);

  // Given an address occupied by a live code object, return that object.
  static Object* FindCodeObject(Address a);

  // Invoke Shrink on shrinkable spaces.
  static void Shrink();

  // Current state of the heap with respect to garbage collection.
  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  static inline HeapState gc_state() { return gc_state_; }
876
#ifdef DEBUG
  static bool IsAllocationAllowed() { return allocation_allowed_; }
  static inline bool allow_allocation(bool enable);

  // Whether allocation failure is currently forbidden (see the
  // DisallowAllocationFailure friend class).
  static bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  static void TracePathToObject(Object* target);
  static void TracePathToGlobal();
#endif

  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary, the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
895
  // Commits from space if it is uncommitted.
  static void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC.  For paged spaces the space requested must include the
  // space wasted at the end of each page when allocating linearly.
  // NOTE(review): the original comment claimed a true/false result but the
  // function returns void — confirm the failure protocol against heap.cc.
  static void ReserveSpace(
      int new_space_size,
      int pointer_space_size,
      int data_space_size,
      int code_space_size,
      int map_space_size,
      int cell_space_size,
      int large_object_size);
913
  //
  // Support for the API.
  //

  static bool CreateApiObjects();

  // Attempt to find the number in a small cache.  If we find it, return
  // the string representation of the number.  Otherwise return undefined.
  static Object* GetNumberStringCache(Object* number);

  // Update the cache with a new number-string pair.
  static void SetNumberStringCache(Object* number, String* str);
926
  // Adjusts the amount of registered external memory.
  // Returns the adjusted value.
  static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);

  // Allocate uninitialized fixed array.
  static Object* AllocateRawFixedArray(int length);
  static Object* AllocateRawFixedArray(int length,
                                       PretenureFlag pretenure);
Steve Blocka7e24c12009-10-30 11:49:00 +0000935
936 // True if we have reached the allocation limit in the old generation that
937 // should force the next GC (caused normally) to be a full one.
938 static bool OldGenerationPromotionLimitReached() {
939 return (PromotedSpaceSize() + PromotedExternalMemorySize())
940 > old_gen_promotion_limit_;
941 }
942
Leon Clarkee46be812010-01-19 14:06:41 +0000943 static intptr_t OldGenerationSpaceAvailable() {
944 return old_gen_allocation_limit_ -
945 (PromotedSpaceSize() + PromotedExternalMemorySize());
946 }
947
  // True if we have reached the allocation limit in the old generation that
  // should artificially cause a GC right now.
  static bool OldGenerationAllocationLimitReached() {
    return OldGenerationSpaceAvailable() < 0;
  }

  // Can be called when the embedding application is idle.
  static bool IdleNotification();
956
957 // Declare all the root indices.
958 enum RootListIndex {
959#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
960 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
961#undef ROOT_INDEX_DECLARATION
962
963// Utility type maps
964#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
965 STRUCT_LIST(DECLARE_STRUCT_MAP)
966#undef DECLARE_STRUCT_MAP
967
968#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
969 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
970#undef SYMBOL_DECLARATION
971
972 kSymbolTableRootIndex,
973 kStrongRootListLength = kSymbolTableRootIndex,
974 kRootListLength
975 };
976
  // Returns a string for the given number, consulting (and populating) the
  // number-string cache unless check_number_string_cache is false.
  static Object* NumberToString(Object* number,
                                bool check_number_string_cache = true);

  static Map* MapForExternalArrayType(ExternalArrayType array_type);
  static RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  // Record current heap figures into *stats.
  static void RecordStats(HeapStats* stats);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                             Address src,
                                                             int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                             Address src,
                                                             int byte_size);

  // Check new space expansion criteria and expand semispaces if it was hit.
  static void CheckNewSpaceExpansionCriteria();

  // Accumulates the number of bytes that survived the last scavenge; read
  // by the new-space expansion criteria.
  static inline void IncrementYoungSurvivorsCounter(int survived) {
    survived_since_last_expansion_ += survived;
  }

  static void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  static inline bool ShouldBePromoted(Address old_address, int object_size);

  static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }

  static void ClearJSFunctionResultCaches();

  static GCTracer* tracer() { return tracer_; }
1023
 private:
  // Size limits for the semispaces and the old generation.
  static int reserved_semispace_size_;
  static int max_semispace_size_;
  static int initial_semispace_size_;
  static int max_old_generation_size_;
  static size_t code_range_size_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  static int survived_since_last_expansion_;

  // Nesting depths of the corresponding scope classes (see below).
  static int always_allocate_scope_depth_;
  static int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  static int contexts_disposed_;

#if defined(V8_TARGET_ARCH_X64)
  static const int kMaxObjectSizeInNewSpace = 512*KB;
#else
  static const int kMaxObjectSizeInNewSpace = 256*KB;
#endif

  // The spaces that make up the heap.
  static NewSpace new_space_;
  static OldSpace* old_pointer_space_;
  static OldSpace* old_data_space_;
  static OldSpace* code_space_;
  static MapSpace* map_space_;
  static CellSpace* cell_space_;
  static LargeObjectSpace* lo_space_;
  static HeapState gc_state_;

  // Returns the size of objects residing in non-new spaces.
  static int PromotedSpaceSize();

  // Returns the amount of external memory registered since last global gc.
  static int PromotedExternalMemorySize();

  static int mc_count_;  // how many mark-compact collections happened
  static int ms_count_;  // how many mark-sweep collections happened
  static int gc_count_;  // how many gc happened
1065
  // Total length of the strings we failed to flatten since the last GC.
  static int unflattened_strings_length_;

// Private setters for the heap roots; one per entry in ROOT_LIST.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline void set_##name(type* value) { \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
1075
#ifdef DEBUG
  static bool allocation_allowed_;

  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  static int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  static bool disallow_allocation_failure_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke.
  static int old_gen_promotion_limit_;

  // Limit that triggers a global GC as soon as is reasonable.  This is
  // checked before expanding a paged space in the old generation and on
  // every allocation in large object space.
  static int old_gen_allocation_limit_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs. If reached a global GC is forced.
  static int external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles.
  static int amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  static int amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  static int old_gen_exhausted_;

  // The root table; generated code reaches it via roots_address().
  static Object* roots_[kRootListLength];
1115
  // Table entry mapping a string instance type to its size and root index.
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  // Table entry mapping a constant symbol's contents to its root index.
  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  // Table entry mapping a struct instance type to its size and root index.
  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];

  // The special hidden symbol which is an empty string, but does not match
  // any string when looked up in properties.
  static String* hidden_symbol_;
  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    // Equality deliberately compares the callback only, so a pair can be
    // matched (e.g. for removal) by function pointer alone.
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
  static List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1154
  // As GCPrologueCallbackPair, but for callbacks run after a GC.
  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    // Equality deliberately compares the callback only, so a pair can be
    // matched (e.g. for removal) by function pointer alone.
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
  static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1166
  static GCCallback global_gc_prologue_callback_;
  static GCCallback global_gc_epilogue_callback_;

  // Checks whether a global GC is necessary.
  static GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection.
  static void PerformGarbageCollection(AllocationSpace space,
                                       GarbageCollector collector,
                                       GCTracer* tracer);

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  static inline Object* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  static inline Object* AllocateRawCell();

  // Initializes a JSObject based on its map.
  static void InitializeJSObjectFromMap(JSObject* obj,
                                        FixedArray* properties,
                                        Map* map);

  static bool CreateInitialMaps();
  static bool CreateInitialObjects();

  // These four Create*EntryStub functions are here because of a gcc-4.4 bug
  // that assigns wrong vtable entries.
  static void CreateCEntryStub();
  static void CreateJSEntryStub();
  static void CreateJSConstructEntryStub();
  static void CreateRegExpCEntryStub();

  static void CreateFixedStubs();

  static Object* CreateOddball(const char* to_string, Object* to_number);

  // Allocate empty fixed array.
  static Object* AllocateEmptyFixedArray();

  // Performs a minor collection in new generation.
  static void Scavenge();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Object** pointer);

  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                            Address new_space_front);

  // Performs a major collection in the whole heap.
  static void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  static void MarkCompactPrologue(bool is_compacting);
  static void MarkCompactEpilogue(bool is_compacting);
1224
Kristian Monsen25f61362010-05-21 11:50:48 +01001225 // Completely clear the Instanceof cache (to stop it keeping objects alive
1226 // around a GC).
1227 static void CompletelyClearInstanceofCache() {
1228 set_instanceof_cache_map(the_hole_value());
1229 set_instanceof_cache_function(the_hole_value());
1230 }
1231
  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the source
  // object.  Returns the target object.
  static inline HeapObject* MigrateObject(HeapObject* source,
                                          HeapObject* target,
                                          int size);

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  // Record the copy of an object in the NewSpace's statistics.
  static void RecordCopiedObject(HeapObject* obj);

  // Record statistics before and after garbage collection.
  static void ReportStatisticsBeforeGC();
  static void ReportStatisticsAfterGC();
#endif

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Returns the function.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it. Specifically, a function that creates
  // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
  // Please note this does not perform a garbage collection.
  static inline Object* InitializeFunction(JSFunction* function,
                                           SharedFunctionInfo* shared,
                                           Object* prototype);

  static GCTracer* tracer_;


  // Initializes the number to string cache based on the max semispace size.
  static Object* InitializeNumberStringCache();
  // Flush the number to string cache.
  static void FlushNumberStringCache();

  // Flush code from functions we do not expect to use again. The code will
  // be replaced with a lazy compilable version.
  static void FlushCode();

  static const int kInitialSymbolTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;

  friend class Factory;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class LinearAllocationScope;
};
1281
1282
// Plain collection of out-pointers through which Heap::RecordStats
// presumably reports current heap figures — confirm against heap.cc.
class HeapStats {
 public:
  // NOTE(review): start_marker/end_marker look like sentinels bracketing the
  // struct so a consumer can check it was filled completely — confirm.
  int* start_marker;
  int* new_space_size;
  int* new_space_capacity;
  int* old_pointer_space_size;
  int* old_pointer_space_capacity;
  int* old_data_space_size;
  int* old_data_space_capacity;
  int* code_space_size;
  int* code_space_capacity;
  int* map_space_size;
  int* map_space_capacity;
  int* cell_space_size;
  int* cell_space_capacity;
  int* lo_space_size;
  int* global_handle_count;
  int* weak_global_handle_count;
  int* pending_global_handle_count;
  int* near_death_global_handle_count;
  int* destroyed_global_handle_count;
  int* end_marker;
};
1306
1307
1308class AlwaysAllocateScope {
1309 public:
1310 AlwaysAllocateScope() {
1311 // We shouldn't hit any nested scopes, because that requires
1312 // non-handle code to call handle code. The code still works but
1313 // performance will degrade, so we want to catch this situation
1314 // in debug mode.
1315 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1316 Heap::always_allocate_scope_depth_++;
1317 }
1318
1319 ~AlwaysAllocateScope() {
1320 Heap::always_allocate_scope_depth_--;
1321 ASSERT(Heap::always_allocate_scope_depth_ == 0);
1322 }
1323};
1324
1325
Steve Blockd0582a62009-12-15 09:54:21 +00001326class LinearAllocationScope {
1327 public:
1328 LinearAllocationScope() {
1329 Heap::linear_allocation_scope_depth_++;
1330 }
1331
1332 ~LinearAllocationScope() {
1333 Heap::linear_allocation_scope_depth_--;
1334 ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
1335 }
1336};
1337
1338
Steve Blocka7e24c12009-10-30 11:49:00 +00001339#ifdef DEBUG
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001340// Visitor class to verify interior pointers in spaces that do not contain
1341// or care about intergenerational references. All heap object pointers have to
1342// point into the heap to a location that has a map pointer at its first word.
1343// Caveat: Heap::Contains is an approximation because it can return true for
1344// objects in a heap space but above the allocation pointer.
Steve Blocka7e24c12009-10-30 11:49:00 +00001345class VerifyPointersVisitor: public ObjectVisitor {
1346 public:
1347 void VisitPointers(Object** start, Object** end) {
1348 for (Object** current = start; current < end; current++) {
1349 if ((*current)->IsHeapObject()) {
1350 HeapObject* object = HeapObject::cast(*current);
1351 ASSERT(Heap::Contains(object));
1352 ASSERT(object->map()->IsMap());
1353 }
1354 }
1355 }
1356};
1357
1358
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001359// Visitor class to verify interior pointers in spaces that use region marks
1360// to keep track of intergenerational references.
1361// As VerifyPointersVisitor but also checks that dirty marks are set
1362// for regions covering intergenerational references.
1363class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
Steve Blocka7e24c12009-10-30 11:49:00 +00001364 public:
1365 void VisitPointers(Object** start, Object** end) {
1366 for (Object** current = start; current < end; current++) {
1367 if ((*current)->IsHeapObject()) {
1368 HeapObject* object = HeapObject::cast(*current);
1369 ASSERT(Heap::Contains(object));
1370 ASSERT(object->map()->IsMap());
1371 if (Heap::InNewSpace(object)) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001372 ASSERT(Heap::InToSpace(object));
1373 Address addr = reinterpret_cast<Address>(current);
1374 ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
Steve Blocka7e24c12009-10-30 11:49:00 +00001375 }
1376 }
1377 }
1378 }
1379};
1380#endif
1381
1382
1383// Space iterator for iterating over all spaces of the heap.
1384// Returns each space in turn, and null when it is done.
1385class AllSpaces BASE_EMBEDDED {
1386 public:
1387 Space* next();
1388 AllSpaces() { counter_ = FIRST_SPACE; }
1389 private:
1390 int counter_;
1391};
1392
1393
1394// Space iterator for iterating over all old spaces of the heap: Old pointer
1395// space, old data space and code space.
1396// Returns each space in turn, and null when it is done.
1397class OldSpaces BASE_EMBEDDED {
1398 public:
1399 OldSpace* next();
1400 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1401 private:
1402 int counter_;
1403};
1404
1405
1406// Space iterator for iterating over all the paged spaces of the heap:
Leon Clarkee46be812010-01-19 14:06:41 +00001407// Map space, old pointer space, old data space, code space and cell space.
Steve Blocka7e24c12009-10-30 11:49:00 +00001408// Returns each space in turn, and null when it is done.
1409class PagedSpaces BASE_EMBEDDED {
1410 public:
1411 PagedSpace* next();
1412 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1413 private:
1414 int counter_;
1415};
1416
1417
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // True while there are more spaces left to visit.
  bool has_next();
  // Returns an object iterator over the next space. The iterator is owned
  // by this SpaceIterator and is destroyed with it — do not delete it.
  ObjectIterator* next();

 private:
  // Creates the object iterator for the space selected by current_space_.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1435
1436
1437// A HeapIterator provides iteration over the whole heap It aggregates a the
1438// specific iterators for the different spaces as these can only iterate over
1439// one space only.
1440
1441class HeapIterator BASE_EMBEDDED {
1442 public:
1443 explicit HeapIterator();
1444 virtual ~HeapIterator();
1445
Steve Blocka7e24c12009-10-30 11:49:00 +00001446 HeapObject* next();
1447 void reset();
1448
1449 private:
1450 // Perform the initialization.
1451 void Init();
1452
1453 // Perform all necessary shutdown (destruction) work.
1454 void Shutdown();
1455
1456 // Space iterator for iterating all the spaces.
1457 SpaceIterator* space_iterator_;
1458 // Object iterator for the space currently being iterated.
1459 ObjectIterator* object_iterator_;
1460};
1461
1462
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // Cache geometry: kLength direct-mapped entries; kCapacityMask relies on
  // kLength being a power of two.
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  // Hashes a (map, name) pair to an index in [0, kLength).
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays. Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  // A cache key: the (map, name) pair identifying an entry.
  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference reads keys_address()/field_offsets_address() so
  // generated code can probe the cache directly.
  friend class ExternalReference;
};
Steve Blocka7e24c12009-10-30 11:49:00 +00001502
1503
1504// Cache for mapping (array, property name) into descriptor index.
1505// The cache contains both positive and negative results.
1506// Descriptor index equals kNotFound means the property is absent.
1507// Cleared at startup and prior to any gc.
1508class DescriptorLookupCache {
1509 public:
1510 // Lookup descriptor index for (map, name).
1511 // If absent, kAbsent is returned.
1512 static int Lookup(DescriptorArray* array, String* name) {
1513 if (!StringShape(name).IsSymbol()) return kAbsent;
1514 int index = Hash(array, name);
1515 Key& key = keys_[index];
1516 if ((key.array == array) && (key.name == name)) return results_[index];
1517 return kAbsent;
1518 }
1519
1520 // Update an element in the cache.
1521 static void Update(DescriptorArray* array, String* name, int result) {
1522 ASSERT(result != kAbsent);
1523 if (StringShape(name).IsSymbol()) {
1524 int index = Hash(array, name);
1525 Key& key = keys_[index];
1526 key.array = array;
1527 key.name = name;
1528 results_[index] = result;
1529 }
1530 }
1531
1532 // Clear the cache.
1533 static void Clear();
1534
1535 static const int kAbsent = -2;
1536 private:
1537 static int Hash(DescriptorArray* array, String* name) {
1538 // Uses only lower 32 bits if pointers are larger.
Andrei Popescu402d9372010-02-26 13:31:12 +00001539 uint32_t array_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001540 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
Andrei Popescu402d9372010-02-26 13:31:12 +00001541 uint32_t name_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001542 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1543 return (array_hash ^ name_hash) % kLength;
1544 }
1545
1546 static const int kLength = 64;
1547 struct Key {
1548 DescriptorArray* array;
1549 String* name;
1550 };
1551
1552 static Key keys_[kLength];
1553 static int results_[kLength];
1554};
1555
1556
1557// ----------------------------------------------------------------------------
1558// Marking stack for tracing live objects.
1559
1560class MarkingStack {
1561 public:
1562 void Initialize(Address low, Address high) {
1563 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1564 high_ = reinterpret_cast<HeapObject**>(high);
1565 overflowed_ = false;
1566 }
1567
1568 bool is_full() { return top_ >= high_; }
1569
1570 bool is_empty() { return top_ <= low_; }
1571
1572 bool overflowed() { return overflowed_; }
1573
1574 void clear_overflowed() { overflowed_ = false; }
1575
1576 // Push the (marked) object on the marking stack if there is room,
1577 // otherwise mark the object as overflowed and wait for a rescan of the
1578 // heap.
1579 void Push(HeapObject* object) {
1580 CHECK(object->IsHeapObject());
1581 if (is_full()) {
1582 object->SetOverflow();
1583 overflowed_ = true;
1584 } else {
1585 *(top_++) = object;
1586 }
1587 }
1588
1589 HeapObject* Pop() {
1590 ASSERT(!is_empty());
1591 HeapObject* object = *(--top_);
1592 CHECK(object->IsHeapObject());
1593 return object;
1594 }
1595
1596 private:
1597 HeapObject** low_;
1598 HeapObject** top_;
1599 HeapObject** high_;
1600 bool overflowed_;
1601};
1602
1603
1604// A helper class to document/test C++ scopes where we do not
1605// expect a GC. Usage:
1606//
1607// /* Allocation not allowed: we cannot handle a GC in this scope. */
1608// { AssertNoAllocation nogc;
1609// ...
1610// }
1611
1612#ifdef DEBUG
1613
1614class DisallowAllocationFailure {
1615 public:
1616 DisallowAllocationFailure() {
1617 old_state_ = Heap::disallow_allocation_failure_;
1618 Heap::disallow_allocation_failure_ = true;
1619 }
1620 ~DisallowAllocationFailure() {
1621 Heap::disallow_allocation_failure_ = old_state_;
1622 }
1623 private:
1624 bool old_state_;
1625};
1626
1627class AssertNoAllocation {
1628 public:
1629 AssertNoAllocation() {
1630 old_state_ = Heap::allow_allocation(false);
1631 }
1632
1633 ~AssertNoAllocation() {
1634 Heap::allow_allocation(old_state_);
1635 }
1636
1637 private:
1638 bool old_state_;
1639};
1640
1641class DisableAssertNoAllocation {
1642 public:
1643 DisableAssertNoAllocation() {
1644 old_state_ = Heap::allow_allocation(true);
1645 }
1646
1647 ~DisableAssertNoAllocation() {
1648 Heap::allow_allocation(old_state_);
1649 }
1650
1651 private:
1652 bool old_state_;
1653};
1654
1655#else // ndef DEBUG
1656
// Release-build version: the no-allocation check is debug-only, so this is
// an empty no-op with the same interface.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};
1662
// Release-build version: no-op counterpart of the debug-only guard.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1668
1669#endif
1670
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  // RAII timer: accumulates the wall-clock time spent between construction
  // and destruction into the tracer's bucket for the given scope id.
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_COMPACT,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT((0 <= scope_) && (scope_ < kNumberOfScopes));
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;    // Tracer owning the scopes_ accumulator array.
    ScopeId scope_;       // Which bucket this scope accumulates into.
    double start_time_;   // Timestamp (ms) taken at construction.
  };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  // Returns maximum GC pause.
  static int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  static int get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  static int get_min_in_mutator() { return min_in_mutator_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of object in heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  int start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, eg, the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects. Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared. Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC. Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of free lists
  // before the current GC.
  int in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  int allocated_since_last_gc_;

  // Amount of time spent in mutator that is time elapsed between end of the
  // previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  int promoted_objects_size_;

  // Maximum GC pause.
  static int max_gc_pause_;

  // Maximum size of objects alive after GC.
  static int max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  static int min_in_mutator_;

  // Size of objects alive after last GC.
  static int alive_after_last_gc_;

  // Timestamp (ms) of the end of the last GC; used to compute mutator time.
  static double last_gc_end_timestamp_;
};
1807
1808
// Direct-mapped cache of transcendental math results: one cache instance
// per function type, each mapping a double input to a cached heap number.
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  static inline Object* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      // Caches are created lazily on first use of each function type.
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers. This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Looks up (or computes and caches) f(input). May return a Failure
  // object if allocation of the result heap number fails.
  inline Object* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    // Compare the raw 64 input bits; this distinguishes e.g. -0.0 from 0.0.
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      Counters::transcendental_cache_hit.Increment();
      return e.output;
    }
    double answer = Calculate(input);
    Object* heap_number = Heap::AllocateHeapNumber(answer);
    if (!heap_number->IsFailure()) {
      // Only replace the entry when allocation succeeded, so the cache
      // never holds a Failure object.
      elements_[hash].in[0] = c.integers[0];
      elements_[hash].in[1] = c.integers[1];
      elements_[hash].output = heap_number;
    }
    Counters::transcendental_cache_miss.Increment();
    return heap_number;
  }

  // Computes f(input) for this cache's function type.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  static const int kCacheSize = 512;
  // One cache slot: the input double's bit pattern and the cached result.
  struct Element {
    uint32_t in[2];
    Object* output;
  };
  // Union used to reinterpret a double's bits as two 32-bit integers.
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  // Mixes the 64 input bits down to an index in [0, kCacheSize).
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= hash >> 16;
    hash ^= hash >> 8;
    return (hash & (kCacheSize - 1));
  }

  static Address cache_array_address() {
    // Used to create an external reference.
    return reinterpret_cast<Address>(caches_);
  }

  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;
  // Inline implementation of the caching.
  friend class TranscendentalCacheStub;

  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;
};
1904
1905
// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  // Visits all registered strings with the given visitor.
  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  // Debug-only consistency check of the two lists.
  inline static void Verify();

  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space strings are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};
1938
Steve Blocka7e24c12009-10-30 11:49:00 +00001939} } // namespace v8::internal
1940
1941#endif // V8_HEAP_H_