blob: a8f8c343950239688ac801d237255ba0834abf8c [file] [log] [blame]
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
Steve Block6ded16b2010-05-10 14:33:55 +010033#include "splay-tree-inl.h"
34#include "v8-counters.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
Steve Block6ded16b2010-05-10 14:33:55 +010039// Forward declarations.
40class ZoneScopeInfo;
41
// Defines all the strong roots in Heap that are present in every build
// configuration. This is an X-macro list: each entry expands the supplied
// macro as V(type, accessor_name, CamelName), and callers instantiate V to
// generate accessors, enum values, etc. NOTE: the order of entries is
// significant (see the deserializer and cache-line comments below), so do
// not reorder entries without understanding those constraints.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V)                                      \
  /* Put the byte array map early. We need it to be in place by the time */    \
  /* the deserializer hits the next page, since it wants to put a byte */      \
  /* array in the unused space at the end of the page. */                      \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top. */    \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Object, undefined_value, UndefinedValue)                                   \
  V(Object, the_hole_value, TheHoleValue)                                      \
  V(Object, null_value, NullValue)                                             \
  V(Object, true_value, TrueValue)                                             \
  V(Object, false_value, FalseValue)                                           \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, meta_map, MetaMap)                                                    \
  V(Object, termination_exception, TerminationException)                       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap)  \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap)  \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, pixel_array_map, PixelArrayMap)                                       \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, context_map, ContextMap)                                              \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, proxy_map, ProxyMap)                                                  \
  V(Object, nan_value, NanValue)                                               \
  V(Object, minus_zero_value, MinusZeroValue)                                  \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Proxy, prototype_accessors, PrototypeAccessors)                            \
  V(NumberDictionary, code_stubs, CodeStubs)                                   \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(Code, c_entry_code, CEntryCode)                                            \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \

// On ARM builds that use the native (non-interpreted) regexp engine, one
// extra strong root is needed: the RegExp C entry code stub. All other
// configurations use the unconditional list as-is.
#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#define STRONG_ROOT_LIST(V)                                                    \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
125
// The full root list: every strong root plus the symbol table, which is
// kept out of STRONG_ROOT_LIST and listed separately here.
#define ROOT_LIST(V)                                                           \
  STRONG_ROOT_LIST(V)                                                          \
  V(SymbolTable, symbol_table, SymbolTable)
129
// X-macro list of well-known symbols. Each entry expands the supplied macro
// as V(identifier, string_contents); the string contents are significant at
// runtime and must not be edited. Names beginning with '.' or wrapped in
// '(...)'/'<...>' are internal and not valid JavaScript identifiers.
#define SYMBOL_LIST(V)                                                   \
  V(Array_symbol, "Array")                                               \
  V(Object_symbol, "Object")                                             \
  V(Proto_symbol, "__proto__")                                           \
  V(StringImpl_symbol, "StringImpl")                                     \
  V(arguments_symbol, "arguments")                                       \
  V(Arguments_symbol, "Arguments")                                       \
  V(arguments_shadow_symbol, ".arguments")                               \
  V(call_symbol, "call")                                                 \
  V(apply_symbol, "apply")                                               \
  V(caller_symbol, "caller")                                             \
  V(boolean_symbol, "boolean")                                           \
  V(Boolean_symbol, "Boolean")                                           \
  V(callee_symbol, "callee")                                             \
  V(constructor_symbol, "constructor")                                   \
  V(code_symbol, ".code")                                                \
  V(result_symbol, ".result")                                            \
  V(catch_var_symbol, ".catch-var")                                      \
  V(empty_symbol, "")                                                    \
  V(eval_symbol, "eval")                                                 \
  V(function_symbol, "function")                                         \
  V(length_symbol, "length")                                             \
  V(name_symbol, "name")                                                 \
  V(number_symbol, "number")                                             \
  V(Number_symbol, "Number")                                             \
  V(RegExp_symbol, "RegExp")                                             \
  V(source_symbol, "source")                                             \
  V(global_symbol, "global")                                             \
  V(ignore_case_symbol, "ignoreCase")                                    \
  V(multiline_symbol, "multiline")                                       \
  V(input_symbol, "input")                                               \
  V(index_symbol, "index")                                               \
  V(last_index_symbol, "lastIndex")                                      \
  V(object_symbol, "object")                                             \
  V(prototype_symbol, "prototype")                                       \
  V(string_symbol, "string")                                             \
  V(String_symbol, "String")                                             \
  V(Date_symbol, "Date")                                                 \
  V(this_symbol, "this")                                                 \
  V(to_string_symbol, "toString")                                        \
  V(char_at_symbol, "CharAt")                                            \
  V(undefined_symbol, "undefined")                                       \
  V(value_of_symbol, "valueOf")                                          \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")               \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                  \
  V(illegal_access_symbol, "illegal access")                             \
  V(out_of_memory_symbol, "out-of-memory")                               \
  V(illegal_execution_state_symbol, "illegal execution state")           \
  V(get_symbol, "get")                                                   \
  V(set_symbol, "set")                                                   \
  V(function_class_symbol, "Function")                                   \
  V(illegal_argument_symbol, "illegal argument")                         \
  V(MakeReferenceError_symbol, "MakeReferenceError")                     \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                           \
  V(MakeTypeError_symbol, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_symbol, "illegal_return")                             \
  V(illegal_break_symbol, "illegal_break")                               \
  V(illegal_continue_symbol, "illegal_continue")                         \
  V(unknown_label_symbol, "unknown_label")                               \
  V(redeclaration_symbol, "redeclaration")                               \
  V(failure_symbol, "<failure>")                                         \
  V(space_symbol, " ")                                                   \
  V(exec_symbol, "exec")                                                 \
  V(zero_symbol, "0")                                                    \
  V(global_eval_symbol, "GlobalEval")                                    \
  V(identity_hash_symbol, "v8::IdentityHash")                            \
  V(closure_symbol, "(closure)")
Steve Blocka7e24c12009-10-30 11:49:00 +0000202
203
204// Forward declaration of the GCTracer class.
205class GCTracer;
Steve Blockd0582a62009-12-15 09:54:21 +0000206class HeapStats;
Steve Blocka7e24c12009-10-30 11:49:00 +0000207
208
// Callback that takes the address of a slot in the external string table and
// yields a String*. Presumably used to update/relocate table entries during
// GC — TODO(review): confirm the exact contract against the callers.
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);

// Callback over a region of memory [start, end) with a per-object-slot
// function. NOTE(review): the meaning of the bool return value is not
// visible in this header — confirm at the call sites before relying on it.
typedef bool (*DirtyRegionCallback)(Address start,
                                    Address end,
                                    ObjectSlotCallback copy_object_func);
Steve Block6ded16b2010-05-10 14:33:55 +0100215
Steve Blocka7e24c12009-10-30 11:49:00 +0000216// The all static Heap captures the interface to the global object heap.
217// All JavaScript contexts by this process share the same object heap.
218
219class Heap : public AllStatic {
220 public:
221 // Configure heap size before setup. Return false if the heap has been
222 // setup already.
Steve Block3ce2e202009-11-05 08:53:23 +0000223 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000224 static bool ConfigureHeapDefault();
225
226 // Initializes the global object heap. If create_heap_objects is true,
227 // also creates the basic non-mutable objects.
228 // Returns whether it succeeded.
229 static bool Setup(bool create_heap_objects);
230
231 // Destroys all memory allocated by the heap.
232 static void TearDown();
233
Steve Blockd0582a62009-12-15 09:54:21 +0000234 // Set the stack limit in the roots_ array. Some architectures generate
235 // code that looks here, because it is faster than loading from the static
236 // jslimit_/real_jslimit_ variable in the StackGuard.
237 static void SetStackLimits();
Steve Blocka7e24c12009-10-30 11:49:00 +0000238
239 // Returns whether Setup has been called.
240 static bool HasBeenSetup();
241
Steve Block3ce2e202009-11-05 08:53:23 +0000242 // Returns the maximum amount of memory reserved for the heap. For
243 // the young generation, we reserve 4 times the amount needed for a
244 // semi space. The young generation consists of two semi spaces and
245 // we reserve twice the amount needed for those in order to ensure
246 // that new space can be aligned to its size.
247 static int MaxReserved() {
248 return 4 * reserved_semispace_size_ + max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000249 }
Steve Block3ce2e202009-11-05 08:53:23 +0000250 static int MaxSemiSpaceSize() { return max_semispace_size_; }
251 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000252 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
Steve Block3ce2e202009-11-05 08:53:23 +0000253 static int MaxOldGenerationSize() { return max_old_generation_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000254
255 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
256 // more spaces are needed until it reaches the limit.
257 static int Capacity();
258
Steve Block3ce2e202009-11-05 08:53:23 +0000259 // Returns the amount of memory currently committed for the heap.
260 static int CommittedMemory();
261
Steve Blocka7e24c12009-10-30 11:49:00 +0000262 // Returns the available bytes in space w/o growing.
263 // Heap doesn't guarantee that it can allocate an object that requires
264 // all available bytes. Check MaxHeapObjectSize() instead.
265 static int Available();
266
267 // Returns the maximum object size in paged space.
268 static inline int MaxObjectSizeInPagedSpace();
269
270 // Returns of size of all objects residing in the heap.
271 static int SizeOfObjects();
272
273 // Return the starting address and a mask for the new space. And-masking an
274 // address with the mask will result in the start address of the new space
275 // for all addresses in either semispace.
276 static Address NewSpaceStart() { return new_space_.start(); }
277 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
278 static Address NewSpaceTop() { return new_space_.top(); }
279
280 static NewSpace* new_space() { return &new_space_; }
281 static OldSpace* old_pointer_space() { return old_pointer_space_; }
282 static OldSpace* old_data_space() { return old_data_space_; }
283 static OldSpace* code_space() { return code_space_; }
284 static MapSpace* map_space() { return map_space_; }
285 static CellSpace* cell_space() { return cell_space_; }
286 static LargeObjectSpace* lo_space() { return lo_space_; }
287
288 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
289 static Address always_allocate_scope_depth_address() {
290 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
291 }
Steve Blockd0582a62009-12-15 09:54:21 +0000292 static bool linear_allocation() {
Leon Clarkee46be812010-01-19 14:06:41 +0000293 return linear_allocation_scope_depth_ != 0;
Steve Blockd0582a62009-12-15 09:54:21 +0000294 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000295
296 static Address* NewSpaceAllocationTopAddress() {
297 return new_space_.allocation_top_address();
298 }
299 static Address* NewSpaceAllocationLimitAddress() {
300 return new_space_.allocation_limit_address();
301 }
302
303 // Uncommit unused semi space.
304 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
305
306#ifdef ENABLE_HEAP_PROTECTION
307 // Protect/unprotect the heap by marking all spaces read-only/writable.
308 static void Protect();
309 static void Unprotect();
310#endif
311
312 // Allocates and initializes a new JavaScript object based on a
313 // constructor.
314 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
315 // failed.
316 // Please note this does not perform a garbage collection.
317 static Object* AllocateJSObject(JSFunction* constructor,
318 PretenureFlag pretenure = NOT_TENURED);
319
320 // Allocates and initializes a new global object based on a constructor.
321 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
322 // failed.
323 // Please note this does not perform a garbage collection.
324 static Object* AllocateGlobalObject(JSFunction* constructor);
325
326 // Returns a deep copy of the JavaScript object.
327 // Properties and elements are copied too.
328 // Returns failure if allocation failed.
329 static Object* CopyJSObject(JSObject* source);
330
331 // Allocates the function prototype.
332 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
333 // failed.
334 // Please note this does not perform a garbage collection.
335 static Object* AllocateFunctionPrototype(JSFunction* function);
336
337 // Reinitialize an JSGlobalProxy based on a constructor. The object
338 // must have the same size as objects allocated using the
339 // constructor. The object is reinitialized and behaves as an
340 // object that has been freshly allocated using the constructor.
341 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
342 JSGlobalProxy* global);
343
344 // Allocates and initializes a new JavaScript object based on a map.
345 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
346 // failed.
347 // Please note this does not perform a garbage collection.
348 static Object* AllocateJSObjectFromMap(Map* map,
349 PretenureFlag pretenure = NOT_TENURED);
350
351 // Allocates a heap object based on the map.
352 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
353 // failed.
354 // Please note this function does not perform a garbage collection.
355 static Object* Allocate(Map* map, AllocationSpace space);
356
357 // Allocates a JS Map in the heap.
358 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
359 // failed.
360 // Please note this function does not perform a garbage collection.
361 static Object* AllocateMap(InstanceType instance_type, int instance_size);
362
363 // Allocates a partial map for bootstrapping.
364 static Object* AllocatePartialMap(InstanceType instance_type,
365 int instance_size);
366
367 // Allocate a map for the specified function
368 static Object* AllocateInitialMap(JSFunction* fun);
369
Steve Block6ded16b2010-05-10 14:33:55 +0100370 // Allocates an empty code cache.
371 static Object* AllocateCodeCache();
372
Kristian Monsen25f61362010-05-21 11:50:48 +0100373 // Clear the Instanceof cache (used when a prototype changes).
374 static void ClearInstanceofCache() {
375 set_instanceof_cache_function(the_hole_value());
376 }
377
Steve Blocka7e24c12009-10-30 11:49:00 +0000378 // Allocates and fully initializes a String. There are two String
379 // encodings: ASCII and two byte. One should choose between the three string
380 // allocation functions based on the encoding of the string buffer used to
381 // initialized the string.
382 // - ...FromAscii initializes the string from a buffer that is ASCII
383 // encoded (it does not check that the buffer is ASCII encoded) and the
384 // result will be ASCII encoded.
385 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
386 // encoded. If the characters are all single-byte characters, the
387 // result will be ASCII encoded, otherwise it will converted to two
388 // byte.
389 // - ...FromTwoByte initializes the string from a buffer that is two-byte
390 // encoded. If the characters are all single-byte characters, the
391 // result will be converted to ASCII, otherwise it will be left as
392 // two-byte.
393 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
394 // failed.
395 // Please note this does not perform a garbage collection.
396 static Object* AllocateStringFromAscii(
397 Vector<const char> str,
398 PretenureFlag pretenure = NOT_TENURED);
399 static Object* AllocateStringFromUtf8(
400 Vector<const char> str,
401 PretenureFlag pretenure = NOT_TENURED);
402 static Object* AllocateStringFromTwoByte(
403 Vector<const uc16> str,
404 PretenureFlag pretenure = NOT_TENURED);
405
406 // Allocates a symbol in old space based on the character stream.
407 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
408 // failed.
409 // Please note this function does not perform a garbage collection.
410 static inline Object* AllocateSymbol(Vector<const char> str,
411 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000412 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000413
414 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
415 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000416 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000417
418 static Object* AllocateExternalSymbol(Vector<const char> str,
419 int chars);
420
421
422 // Allocates and partially initializes a String. There are two String
423 // encodings: ASCII and two byte. These functions allocate a string of the
424 // given length and set its map and length fields. The characters of the
425 // string are uninitialized.
426 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
427 // failed.
428 // Please note this does not perform a garbage collection.
429 static Object* AllocateRawAsciiString(
430 int length,
431 PretenureFlag pretenure = NOT_TENURED);
432 static Object* AllocateRawTwoByteString(
433 int length,
434 PretenureFlag pretenure = NOT_TENURED);
435
436 // Computes a single character string where the character has code.
437 // A cache is used for ascii codes.
438 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
439 // failed. Please note this does not perform a garbage collection.
440 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
441
442 // Allocate a byte array of the specified length
443 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
444 // failed.
445 // Please note this does not perform a garbage collection.
446 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
447
448 // Allocate a non-tenured byte array of the specified length
449 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
450 // failed.
451 // Please note this does not perform a garbage collection.
452 static Object* AllocateByteArray(int length);
453
454 // Allocate a pixel array of the specified length
455 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
456 // failed.
457 // Please note this does not perform a garbage collection.
458 static Object* AllocatePixelArray(int length,
459 uint8_t* external_pointer,
460 PretenureFlag pretenure);
461
Steve Block3ce2e202009-11-05 08:53:23 +0000462 // Allocates an external array of the specified length and type.
463 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
464 // failed.
465 // Please note this does not perform a garbage collection.
466 static Object* AllocateExternalArray(int length,
467 ExternalArrayType array_type,
468 void* external_pointer,
469 PretenureFlag pretenure);
470
Steve Blocka7e24c12009-10-30 11:49:00 +0000471 // Allocate a tenured JS global property cell.
472 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
473 // failed.
474 // Please note this does not perform a garbage collection.
475 static Object* AllocateJSGlobalPropertyCell(Object* value);
476
477 // Allocates a fixed array initialized with undefined values
478 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
479 // failed.
480 // Please note this does not perform a garbage collection.
481 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
Steve Block6ded16b2010-05-10 14:33:55 +0100482 // Allocates a fixed array initialized with undefined values
Steve Blocka7e24c12009-10-30 11:49:00 +0000483 static Object* AllocateFixedArray(int length);
484
Steve Block6ded16b2010-05-10 14:33:55 +0100485 // Allocates an uninitialized fixed array. It must be filled by the caller.
486 //
487 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
488 // failed.
489 // Please note this does not perform a garbage collection.
490 static Object* AllocateUninitializedFixedArray(int length);
491
Steve Blocka7e24c12009-10-30 11:49:00 +0000492 // Make a copy of src and return it. Returns
493 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
494 static Object* CopyFixedArray(FixedArray* src);
495
496 // Allocates a fixed array initialized with the hole values.
497 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
498 // failed.
499 // Please note this does not perform a garbage collection.
Steve Block6ded16b2010-05-10 14:33:55 +0100500 static Object* AllocateFixedArrayWithHoles(
501 int length,
502 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000503
504 // AllocateHashTable is identical to AllocateFixedArray except
505 // that the resulting object has hash_table_map as map.
Steve Block6ded16b2010-05-10 14:33:55 +0100506 static Object* AllocateHashTable(int length,
507 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000508
509 // Allocate a global (but otherwise uninitialized) context.
510 static Object* AllocateGlobalContext();
511
512 // Allocate a function context.
513 static Object* AllocateFunctionContext(int length, JSFunction* closure);
514
515 // Allocate a 'with' context.
516 static Object* AllocateWithContext(Context* previous,
517 JSObject* extension,
518 bool is_catch_context);
519
520 // Allocates a new utility object in the old generation.
521 static Object* AllocateStruct(InstanceType type);
522
523 // Allocates a function initialized with a shared part.
524 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
525 // failed.
526 // Please note this does not perform a garbage collection.
527 static Object* AllocateFunction(Map* function_map,
528 SharedFunctionInfo* shared,
Leon Clarkee46be812010-01-19 14:06:41 +0000529 Object* prototype,
530 PretenureFlag pretenure = TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000531
532 // Indicies for direct access into argument objects.
Leon Clarkee46be812010-01-19 14:06:41 +0000533 static const int kArgumentsObjectSize =
534 JSObject::kHeaderSize + 2 * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000535 static const int arguments_callee_index = 0;
536 static const int arguments_length_index = 1;
537
538 // Allocates an arguments object - optionally with an elements array.
539 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
540 // failed.
541 // Please note this does not perform a garbage collection.
542 static Object* AllocateArgumentsObject(Object* callee, int length);
543
Steve Blocka7e24c12009-10-30 11:49:00 +0000544 // Same as NewNumberFromDouble, but may return a preallocated/immutable
545 // number object (e.g., minus_zero_value_, nan_value_)
546 static Object* NumberFromDouble(double value,
547 PretenureFlag pretenure = NOT_TENURED);
548
549 // Allocated a HeapNumber from value.
550 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
551 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
552
553 // Converts an int into either a Smi or a HeapNumber object.
554 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
555 // failed.
556 // Please note this does not perform a garbage collection.
557 static inline Object* NumberFromInt32(int32_t value);
558
559 // Converts an int into either a Smi or a HeapNumber object.
560 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
561 // failed.
562 // Please note this does not perform a garbage collection.
563 static inline Object* NumberFromUint32(uint32_t value);
564
565 // Allocates a new proxy object.
566 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
567 // failed.
568 // Please note this does not perform a garbage collection.
569 static Object* AllocateProxy(Address proxy,
570 PretenureFlag pretenure = NOT_TENURED);
571
572 // Allocates a new SharedFunctionInfo object.
573 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
574 // failed.
575 // Please note this does not perform a garbage collection.
576 static Object* AllocateSharedFunctionInfo(Object* name);
577
578 // Allocates a new cons string object.
579 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
580 // failed.
581 // Please note this does not perform a garbage collection.
582 static Object* AllocateConsString(String* first, String* second);
583
Steve Blocka7e24c12009-10-30 11:49:00 +0000584 // Allocates a new sub string object which is a substring of an underlying
585 // string buffer stretching from the index start (inclusive) to the index
586 // end (exclusive).
587 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
588 // failed.
589 // Please note this does not perform a garbage collection.
590 static Object* AllocateSubString(String* buffer,
591 int start,
Steve Block6ded16b2010-05-10 14:33:55 +0100592 int end,
593 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000594
595 // Allocate a new external string object, which is backed by a string
596 // resource that resides outside the V8 heap.
597 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
598 // failed.
599 // Please note this does not perform a garbage collection.
600 static Object* AllocateExternalStringFromAscii(
601 ExternalAsciiString::Resource* resource);
602 static Object* AllocateExternalStringFromTwoByte(
603 ExternalTwoByteString::Resource* resource);
604
Leon Clarkee46be812010-01-19 14:06:41 +0000605 // Finalizes an external string by deleting the associated external
606 // data and clearing the resource pointer.
607 static inline void FinalizeExternalString(String* string);
608
Steve Blocka7e24c12009-10-30 11:49:00 +0000609 // Allocates an uninitialized object. The memory is non-executable if the
610 // hardware and OS allow.
611 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
612 // failed.
613 // Please note this function does not perform a garbage collection.
614 static inline Object* AllocateRaw(int size_in_bytes,
615 AllocationSpace space,
616 AllocationSpace retry_space);
617
618 // Initialize a filler object to keep the ability to iterate over the heap
619 // when shortening objects.
620 static void CreateFillerObjectAt(Address addr, int size);
621
622 // Makes a new native code object
623 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
624 // failed. On success, the pointer to the Code object is stored in the
625 // self_reference. This allows generated code to reference its own Code
626 // object by containing this pointer.
627 // Please note this function does not perform a garbage collection.
628 static Object* CreateCode(const CodeDesc& desc,
629 ZoneScopeInfo* sinfo,
630 Code::Flags flags,
631 Handle<Object> self_reference);
632
633 static Object* CopyCode(Code* code);
Steve Block6ded16b2010-05-10 14:33:55 +0100634
635 // Copy the code and scope info part of the code object, but insert
636 // the provided data as the relocation information.
637 static Object* CopyCode(Code* code, Vector<byte> reloc_info);
638
Steve Blocka7e24c12009-10-30 11:49:00 +0000639 // Finds the symbol for string in the symbol table.
640 // If not found, a new symbol is added to the table and returned.
641 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
642 // failed.
643 // Please note this function does not perform a garbage collection.
644 static Object* LookupSymbol(Vector<const char> str);
645 static Object* LookupAsciiSymbol(const char* str) {
646 return LookupSymbol(CStrVector(str));
647 }
648 static Object* LookupSymbol(String* str);
649 static bool LookupSymbolIfExists(String* str, String** symbol);
Steve Blockd0582a62009-12-15 09:54:21 +0000650 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
Steve Blocka7e24c12009-10-30 11:49:00 +0000651
652 // Compute the matching symbol map for a string if possible.
653 // NULL is returned if string is in new space or not flattened.
654 static Map* SymbolMapForString(String* str);
655
Steve Block6ded16b2010-05-10 14:33:55 +0100656 // Tries to flatten a string before compare operation.
657 //
658 // Returns a failure in case it was decided that flattening was
659 // necessary and failed. Note, if flattening is not necessary the
660 // string might stay non-flat even when not a failure is returned.
661 //
662 // Please note this function does not perform a garbage collection.
663 static inline Object* PrepareForCompare(String* str);
664
Steve Blocka7e24c12009-10-30 11:49:00 +0000665 // Converts the given boolean condition to JavaScript boolean value.
666 static Object* ToBoolean(bool condition) {
667 return condition ? true_value() : false_value();
668 }
669
  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  static void GarbageCollectionPrologue();
  static void GarbageCollectionEpilogue();

  // Performs garbage collection operation.
  // Returns whether required_space bytes are available after the collection.
  static bool CollectGarbage(int required_space, AllocationSpace space);

  // Performs a full garbage collection. Force compaction if the
  // parameter is true.
  static void CollectAllGarbage(bool force_compaction);

  // Notify the heap that a context has been disposed.  Returns the new
  // running count of disposed contexts (see contexts_disposed_).
  static int NotifyContextDisposed() { return ++contexts_disposed_; }

  // Utility to invoke the scavenger.  This is needed in test code to
  // ensure correct callback for weak global handles.
  static void PerformScavenge();

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  static bool GarbageCollectionGreedyCheck();
#endif
694
Steve Block6ded16b2010-05-10 14:33:55 +0100695 static void AddGCPrologueCallback(
696 GCEpilogueCallback callback, GCType gc_type_filter);
697 static void RemoveGCPrologueCallback(GCEpilogueCallback callback);
698
699 static void AddGCEpilogueCallback(
700 GCEpilogueCallback callback, GCType gc_type_filter);
701 static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
702
Steve Blocka7e24c12009-10-30 11:49:00 +0000703 static void SetGlobalGCPrologueCallback(GCCallback callback) {
Steve Block6ded16b2010-05-10 14:33:55 +0100704 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
Steve Blocka7e24c12009-10-30 11:49:00 +0000705 global_gc_prologue_callback_ = callback;
706 }
707 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
Steve Block6ded16b2010-05-10 14:33:55 +0100708 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
Steve Blocka7e24c12009-10-30 11:49:00 +0000709 global_gc_epilogue_callback_ = callback;
710 }
711
  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // The raw_unchecked_ variants reinterpret the root slot without checking,
  // which is what GC-time code must use.
#define ROOT_ACCESSOR(type, name, camel_name) \
  static inline type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  static inline type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

// Utility type maps
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  static inline Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

  // One checked accessor per pre-interned symbol root.
#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  // The hidden_symbol is special because it is the empty string, but does
  // not match the empty string.
  static String* hidden_symbol() { return hidden_symbol_; }

  // Iterates over all roots in the heap.
  static void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other roots in the heap.
  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
Steve Blocka7e24c12009-10-30 11:49:00 +0000748
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100749 enum ExpectedPageWatermarkState {
750 WATERMARK_SHOULD_BE_VALID,
751 WATERMARK_CAN_BE_INVALID
752 };
Steve Blocka7e24c12009-10-30 11:49:00 +0000753
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100754 // For each dirty region on a page in use from an old space call
755 // visit_dirty_region callback.
756 // If either visit_dirty_region or callback can cause an allocation
757 // in old space and changes in allocation watermark then
758 // can_preallocate_during_iteration should be set to true.
759 // All pages will be marked as having invalid watermark upon
760 // iteration completion.
761 static void IterateDirtyRegions(
762 PagedSpace* space,
763 DirtyRegionCallback visit_dirty_region,
764 ObjectSlotCallback callback,
765 ExpectedPageWatermarkState expected_page_watermark_state);
766
767 // Interpret marks as a bitvector of dirty marks for regions of size
768 // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
769 // memory interval from start to top. For each dirty region call a
770 // visit_dirty_region callback. Return updated bitvector of dirty marks.
771 static uint32_t IterateDirtyRegions(uint32_t marks,
772 Address start,
773 Address end,
774 DirtyRegionCallback visit_dirty_region,
775 ObjectSlotCallback callback);
776
777 // Iterate pointers to new space found in memory interval from start to end.
778 // Update dirty marks for page containing start address.
779 static void IterateAndMarkPointersToNewSpace(Address start,
780 Address end,
781 ObjectSlotCallback callback);
782
783 // Iterate pointers to new space found in memory interval from start to end.
784 // Return true if pointers to new space was found.
785 static bool IteratePointersInDirtyRegion(Address start,
786 Address end,
787 ObjectSlotCallback callback);
788
789
790 // Iterate pointers to new space found in memory interval from start to end.
791 // This interval is considered to belong to the map space.
792 // Return true if pointers to new space was found.
793 static bool IteratePointersInDirtyMapsRegion(Address start,
794 Address end,
795 ObjectSlotCallback callback);
796
Steve Blocka7e24c12009-10-30 11:49:00 +0000797
  // Returns whether the object resides in new space.
  static inline bool InNewSpace(Object* object);
  // Returns whether the object resides in the from/to semispace respectively.
  static inline bool InFromSpace(Object* object);
  static inline bool InToSpace(Object* object);

  // Checks whether an address/object in the heap (including auxiliary
  // area and unused area).
  static bool Contains(Address addr);
  static bool Contains(HeapObject* value);

  // Checks whether an address/object in a space.
  // Currently used by tests, serialization and heap verification only.
  static bool InSpace(Address addr, AllocationSpace space);
  static bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  static inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  static void public_set_code_stubs(NumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  // Replaces the empty-script root (used during bootstrapping).
  static void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  // Update the next script id.
  static inline void SetLastScriptId(Object* last_script_id);

  // Generated code can embed this address to get access to the roots.
  static Object** roots_address() { return roots_; }
836
#ifdef DEBUG
  // Debug-only dumps of the heap and the global handle list.
  static void Print();
  static void PrintHandles();

  // Verify the heap is in its normal state before or after a GC.
  static void Verify();

  // Report heap statistics.
  static void ReportHeapStatistics(const char* title);
  static void ReportCodeStatistics(const char* title);

  // Fill in bogus values in from space
  static void ZapFromSpace();
#endif

#if defined(ENABLE_LOGGING_AND_PROFILING)
  // Print short heap statistics.
  static void PrintShortHeapStatistics();
#endif
856
  // Makes a new symbol object
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  static Object* CreateSymbol(const char* str, int length, int hash);
  static Object* CreateSymbol(String* str);

  // Write barrier support for address[offset] = o.
  static inline void RecordWrite(Address address, int offset);

  // Write barrier support for address[start : start + len[ = o.
  static inline void RecordWrites(Address address, int start, int len);

  // Given an address occupied by a live code object, return that object.
  static Object* FindCodeObject(Address a);

  // Invoke Shrink on shrinkable spaces.
  static void Shrink();
875
  // Current phase of the collector; NOT_IN_GC outside of collections.
  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  static inline HeapState gc_state() { return gc_state_; }

#ifdef DEBUG
  static bool IsAllocationAllowed() { return allocation_allowed_; }
  static inline bool allow_allocation(bool enable);

  // Whether allocation failure is currently expected to be handled by the
  // caller (see DisallowAllocationFailure friend class).
  static bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  // Debug aids: print a retaining path from a root to the given object /
  // to the global object.
  static void TracePathToObject(Object* target);
  static void TracePathToGlobal();
#endif
890
  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary, the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Commits from space if it is uncommitted.
  static void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we can allocate a
  // certain number of bytes using only linear allocation (with a
  // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC.  For paged spaces the space requested must include the
  // space wasted at the end of each page when allocating linearly.
  // NOTE(review): an older comment claimed a boolean "space was reserved"
  // result, but the function returns void — confirm behavior against the
  // definition in heap.cc.
  static void ReserveSpace(
    int new_space_size,
    int pointer_space_size,
    int data_space_size,
    int code_space_size,
    int map_space_size,
    int cell_space_size,
    int large_object_size);
915
Steve Blocka7e24c12009-10-30 11:49:00 +0000916 //
917 // Support for the API.
918 //
919
920 static bool CreateApiObjects();
921
922 // Attempt to find the number in a small cache. If we finds it, return
923 // the string representation of the number. Otherwise return undefined.
924 static Object* GetNumberStringCache(Object* number);
925
926 // Update the cache with a new number-string pair.
927 static void SetNumberStringCache(Object* number, String* str);
928
Steve Blocka7e24c12009-10-30 11:49:00 +0000929 // Adjusts the amount of registered external memory.
930 // Returns the adjusted value.
931 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
932
Steve Block6ded16b2010-05-10 14:33:55 +0100933 // Allocate uninitialized fixed array.
Steve Blocka7e24c12009-10-30 11:49:00 +0000934 static Object* AllocateRawFixedArray(int length);
Steve Block6ded16b2010-05-10 14:33:55 +0100935 static Object* AllocateRawFixedArray(int length,
936 PretenureFlag pretenure);
Steve Blocka7e24c12009-10-30 11:49:00 +0000937
938 // True if we have reached the allocation limit in the old generation that
939 // should force the next GC (caused normally) to be a full one.
940 static bool OldGenerationPromotionLimitReached() {
941 return (PromotedSpaceSize() + PromotedExternalMemorySize())
942 > old_gen_promotion_limit_;
943 }
944
Leon Clarkee46be812010-01-19 14:06:41 +0000945 static intptr_t OldGenerationSpaceAvailable() {
946 return old_gen_allocation_limit_ -
947 (PromotedSpaceSize() + PromotedExternalMemorySize());
948 }
949
Steve Blocka7e24c12009-10-30 11:49:00 +0000950 // True if we have reached the allocation limit in the old generation that
951 // should artificially cause a GC right now.
952 static bool OldGenerationAllocationLimitReached() {
Leon Clarkee46be812010-01-19 14:06:41 +0000953 return OldGenerationSpaceAvailable() < 0;
Steve Blocka7e24c12009-10-30 11:49:00 +0000954 }
955
956 // Can be called when the embedding application is idle.
957 static bool IdleNotification();
958
959 // Declare all the root indices.
960 enum RootListIndex {
961#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
962 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
963#undef ROOT_INDEX_DECLARATION
964
965// Utility type maps
966#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
967 STRUCT_LIST(DECLARE_STRUCT_MAP)
968#undef DECLARE_STRUCT_MAP
969
970#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
971 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
972#undef SYMBOL_DECLARATION
973
974 kSymbolTableRootIndex,
975 kStrongRootListLength = kSymbolTableRootIndex,
976 kRootListLength
977 };
978
Steve Block6ded16b2010-05-10 14:33:55 +0100979 static Object* NumberToString(Object* number,
980 bool check_number_string_cache = true);
Steve Blocka7e24c12009-10-30 11:49:00 +0000981
Steve Block3ce2e202009-11-05 08:53:23 +0000982 static Map* MapForExternalArrayType(ExternalArrayType array_type);
983 static RootListIndex RootIndexForExternalArrayType(
984 ExternalArrayType array_type);
985
Steve Blockd0582a62009-12-15 09:54:21 +0000986 static void RecordStats(HeapStats* stats);
987
Steve Block6ded16b2010-05-10 14:33:55 +0100988 // Copy block of memory from src to dst. Size of block should be aligned
989 // by pointer size.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100990 static inline void CopyBlock(Address dst, Address src, int byte_size);
991
992 static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
993 Address src,
994 int byte_size);
Steve Block6ded16b2010-05-10 14:33:55 +0100995
996 // Optimized version of memmove for blocks with pointer size aligned sizes and
997 // pointer size aligned addresses.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100998 static inline void MoveBlock(Address dst, Address src, int byte_size);
999
1000 static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
1001 Address src,
1002 int byte_size);
Steve Block6ded16b2010-05-10 14:33:55 +01001003
1004 // Check new space expansion criteria and expand semispaces if it was hit.
1005 static void CheckNewSpaceExpansionCriteria();
1006
1007 static inline void IncrementYoungSurvivorsCounter(int survived) {
1008 survived_since_last_expansion_ += survived;
1009 }
1010
1011 static void UpdateNewSpaceReferencesInExternalStringTable(
1012 ExternalStringTableUpdaterCallback updater_func);
1013
1014 // Helper function that governs the promotion policy from new space to
1015 // old. If the object's old address lies below the new space's age
1016 // mark or if we've already filled the bottom 1/16th of the to space,
1017 // we try to promote this object.
1018 static inline bool ShouldBePromoted(Address old_address, int object_size);
1019
1020 static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1021
Kristian Monsen25f61362010-05-21 11:50:48 +01001022 static void ClearJSFunctionResultCaches();
1023
Leon Clarkef7060e22010-06-03 12:02:55 +01001024 static GCTracer* tracer() { return tracer_; }
1025
Steve Blocka7e24c12009-10-30 11:49:00 +00001026 private:
Steve Block3ce2e202009-11-05 08:53:23 +00001027 static int reserved_semispace_size_;
1028 static int max_semispace_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001029 static int initial_semispace_size_;
Steve Block3ce2e202009-11-05 08:53:23 +00001030 static int max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001031 static size_t code_range_size_;
1032
1033 // For keeping track of how much data has survived
1034 // scavenge since last new space expansion.
1035 static int survived_since_last_expansion_;
1036
1037 static int always_allocate_scope_depth_;
Steve Blockd0582a62009-12-15 09:54:21 +00001038 static int linear_allocation_scope_depth_;
Steve Block6ded16b2010-05-10 14:33:55 +01001039
1040 // For keeping track of context disposals.
1041 static int contexts_disposed_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001042
Steve Blocka7e24c12009-10-30 11:49:00 +00001043#if defined(V8_TARGET_ARCH_X64)
1044 static const int kMaxObjectSizeInNewSpace = 512*KB;
1045#else
1046 static const int kMaxObjectSizeInNewSpace = 256*KB;
1047#endif
1048
1049 static NewSpace new_space_;
1050 static OldSpace* old_pointer_space_;
1051 static OldSpace* old_data_space_;
1052 static OldSpace* code_space_;
1053 static MapSpace* map_space_;
1054 static CellSpace* cell_space_;
1055 static LargeObjectSpace* lo_space_;
1056 static HeapState gc_state_;
1057
1058 // Returns the size of object residing in non new spaces.
1059 static int PromotedSpaceSize();
1060
1061 // Returns the amount of external memory registered since last global gc.
1062 static int PromotedExternalMemorySize();
1063
1064 static int mc_count_; // how many mark-compact collections happened
Leon Clarkef7060e22010-06-03 12:02:55 +01001065 static int ms_count_; // how many mark-sweep collections happened
Steve Blocka7e24c12009-10-30 11:49:00 +00001066 static int gc_count_; // how many gc happened
1067
Steve Block6ded16b2010-05-10 14:33:55 +01001068 // Total length of the strings we failed to flatten since the last GC.
1069 static int unflattened_strings_length_;
1070
Steve Blocka7e24c12009-10-30 11:49:00 +00001071#define ROOT_ACCESSOR(type, name, camel_name) \
1072 static inline void set_##name(type* value) { \
1073 roots_[k##camel_name##RootIndex] = value; \
1074 }
1075 ROOT_LIST(ROOT_ACCESSOR)
1076#undef ROOT_ACCESSOR
1077
1078#ifdef DEBUG
1079 static bool allocation_allowed_;
1080
1081 // If the --gc-interval flag is set to a positive value, this
1082 // variable holds the value indicating the number of allocations
1083 // remain until the next failure and garbage collection.
1084 static int allocation_timeout_;
1085
1086 // Do we expect to be able to handle allocation failure at this
1087 // time?
1088 static bool disallow_allocation_failure_;
1089#endif // DEBUG
1090
  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke.
  static int old_gen_promotion_limit_;

  // Limit that triggers a global GC as soon as is reasonable.  This is
  // checked before expanding a paged space in the old generation and on
  // every allocation in large object space.
  static int old_gen_allocation_limit_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs. If reached a global GC is forced.
  static int external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles
  static int amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  static int amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  static int old_gen_exhausted_;

  // The root list itself; indexed by RootListIndex.
  static Object* roots_[kRootListLength];

  // Table row types used by the initial-map/-symbol setup code.
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantSymbolTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantSymbolTable constant_symbol_table[];
  static const StructTable struct_table[];

  // The special hidden symbol which is an empty string, but does not match
  // any string when looked up in properties.
  static String* hidden_symbol_;
  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  // Each pair stores a registered callback together with the GCType filter
  // it was registered for; equality is on the callback alone so removal
  // works regardless of filter.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
  static List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  // Epilogue counterpart of GCPrologueCallbackPair.
  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
  static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  // Legacy single global callbacks (see SetGlobalGC*Callback above).
  static GCCallback global_gc_prologue_callback_;
  static GCCallback global_gc_epilogue_callback_;
1171
  // Checks whether a global GC is necessary
  static GarbageCollector SelectGarbageCollector(AllocationSpace space);

  // Performs garbage collection
  static void PerformGarbageCollection(AllocationSpace space,
                                       GarbageCollector collector,
                                       GCTracer* tracer);

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  static inline Object* AllocateRawMap();

  // Allocate an uninitialized object in the global property cell space.
  static inline Object* AllocateRawCell();

  // Initializes a JSObject based on its map.
  static void InitializeJSObjectFromMap(JSObject* obj,
                                        FixedArray* properties,
                                        Map* map);

  // Bootstrapping: create the root maps and the initial root objects.
  static bool CreateInitialMaps();
  static bool CreateInitialObjects();

  // These four Create*EntryStub functions are here because of a gcc-4.4 bug
  // that assigns wrong vtable entries.
  static void CreateCEntryStub();
  static void CreateJSEntryStub();
  static void CreateJSConstructEntryStub();
  static void CreateRegExpCEntryStub();

  static void CreateFixedStubs();

  // Allocates an Oddball (e.g. undefined/null-like value) with the given
  // string and number representations.
  static Object* CreateOddball(const char* to_string, Object* to_number);

  // Allocate empty fixed array.
  static Object* AllocateEmptyFixedArray();

  // Performs a minor collection in new generation.
  static void Scavenge();

  // Updater passed to UpdateNewSpaceReferencesInExternalStringTable.
  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Object** pointer);

  static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                            Address new_space_front);

  // Performs a major collection in the whole heap.
  static void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  static void MarkCompactPrologue(bool is_compacting);
  static void MarkCompactEpilogue(bool is_compacting);
1226
Kristian Monsen25f61362010-05-21 11:50:48 +01001227 // Completely clear the Instanceof cache (to stop it keeping objects alive
1228 // around a GC).
1229 static void CompletelyClearInstanceofCache() {
1230 set_instanceof_cache_map(the_hole_value());
1231 set_instanceof_cache_function(the_hole_value());
1232 }
1233
Steve Blocka7e24c12009-10-30 11:49:00 +00001234 // Helper function used by CopyObject to copy a source object to an
1235 // allocated target object and update the forwarding pointer in the source
1236 // object. Returns the target object.
Leon Clarkee46be812010-01-19 14:06:41 +00001237 static inline HeapObject* MigrateObject(HeapObject* source,
1238 HeapObject* target,
1239 int size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001240
Steve Blocka7e24c12009-10-30 11:49:00 +00001241#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1242 // Record the copy of an object in the NewSpace's statistics.
1243 static void RecordCopiedObject(HeapObject* obj);
1244
1245 // Record statistics before and after garbage collection.
1246 static void ReportStatisticsBeforeGC();
1247 static void ReportStatisticsAfterGC();
1248#endif
1249
Steve Blocka7e24c12009-10-30 11:49:00 +00001250 // Slow part of scavenge object.
1251 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1252
Steve Blocka7e24c12009-10-30 11:49:00 +00001253 // Initializes a function with a shared part and prototype.
1254 // Returns the function.
1255 // Note: this code was factored out of AllocateFunction such that
1256 // other parts of the VM could use it. Specifically, a function that creates
1257 // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1258 // Please note this does not perform a garbage collection.
1259 static inline Object* InitializeFunction(JSFunction* function,
1260 SharedFunctionInfo* shared,
1261 Object* prototype);
1262
Leon Clarkef7060e22010-06-03 12:02:55 +01001263 static GCTracer* tracer_;
1264
Leon Clarkee46be812010-01-19 14:06:41 +00001265
1266 // Initializes the number to string cache based on the max semispace size.
1267 static Object* InitializeNumberStringCache();
1268 // Flush the number to string cache.
1269 static void FlushNumberStringCache();
1270
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001271 // Flush code from functions we do not expect to use again. The code will
1272 // be replaced with a lazy compilable version.
1273 static void FlushCode();
1274
Steve Blocka7e24c12009-10-30 11:49:00 +00001275 static const int kInitialSymbolTableSize = 2048;
1276 static const int kInitialEvalCacheSize = 64;
1277
1278 friend class Factory;
1279 friend class DisallowAllocationFailure;
1280 friend class AlwaysAllocateScope;
Steve Blockd0582a62009-12-15 09:54:21 +00001281 friend class LinearAllocationScope;
1282};
1283
1284
// Plain record of pointers to externally owned counters, filled in by
// Heap::RecordStats.  start_marker/end_marker presumably bracket the record
// for integrity checking — TODO confirm against heap.cc.
class HeapStats {
 public:
  int* start_marker;
  int* new_space_size;
  int* new_space_capacity;
  int* old_pointer_space_size;
  int* old_pointer_space_capacity;
  int* old_data_space_size;
  int* old_data_space_capacity;
  int* code_space_size;
  int* code_space_capacity;
  int* map_space_size;
  int* map_space_capacity;
  int* cell_space_size;
  int* cell_space_capacity;
  int* lo_space_size;
  int* global_handle_count;
  int* weak_global_handle_count;
  int* pending_global_handle_count;
  int* near_death_global_handle_count;
  int* destroyed_global_handle_count;
  int* end_marker;
};
1308
1309
// RAII scope that forces allocations to succeed while it is alive (the
// depth counter is consulted by the Heap's allocation paths).
class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code.  The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    Heap::always_allocate_scope_depth_--;
    // Depth must return to zero — nesting is disallowed (see ctor).
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};
1326
1327
Steve Blockd0582a62009-12-15 09:54:21 +00001328class LinearAllocationScope {
1329 public:
1330 LinearAllocationScope() {
1331 Heap::linear_allocation_scope_depth_++;
1332 }
1333
1334 ~LinearAllocationScope() {
1335 Heap::linear_allocation_scope_depth_--;
1336 ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
1337 }
1338};
1339
1340
Steve Blocka7e24c12009-10-30 11:49:00 +00001341#ifdef DEBUG
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001342// Visitor class to verify interior pointers in spaces that do not contain
1343// or care about intergenerational references. All heap object pointers have to
1344// point into the heap to a location that has a map pointer at its first word.
1345// Caveat: Heap::Contains is an approximation because it can return true for
1346// objects in a heap space but above the allocation pointer.
Steve Blocka7e24c12009-10-30 11:49:00 +00001347class VerifyPointersVisitor: public ObjectVisitor {
1348 public:
1349 void VisitPointers(Object** start, Object** end) {
1350 for (Object** current = start; current < end; current++) {
1351 if ((*current)->IsHeapObject()) {
1352 HeapObject* object = HeapObject::cast(*current);
1353 ASSERT(Heap::Contains(object));
1354 ASSERT(object->map()->IsMap());
1355 }
1356 }
1357 }
1358};
1359
1360
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001361// Visitor class to verify interior pointers in spaces that use region marks
1362// to keep track of intergenerational references.
1363// As VerifyPointersVisitor but also checks that dirty marks are set
1364// for regions covering intergenerational references.
1365class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
Steve Blocka7e24c12009-10-30 11:49:00 +00001366 public:
1367 void VisitPointers(Object** start, Object** end) {
1368 for (Object** current = start; current < end; current++) {
1369 if ((*current)->IsHeapObject()) {
1370 HeapObject* object = HeapObject::cast(*current);
1371 ASSERT(Heap::Contains(object));
1372 ASSERT(object->map()->IsMap());
1373 if (Heap::InNewSpace(object)) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001374 ASSERT(Heap::InToSpace(object));
1375 Address addr = reinterpret_cast<Address>(current);
1376 ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
Steve Blocka7e24c12009-10-30 11:49:00 +00001377 }
1378 }
1379 }
1380 }
1381};
1382#endif
1383
1384
// Space iterator for iterating over all spaces of the heap.
// Returns each space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  // Returns the next space, or null after the last one.
  Space* next();
  AllSpaces() { counter_ = FIRST_SPACE; }
 private:
  // Candidate for the next space to return (an AllocationSpace value).
  int counter_;
};
1394
1395
// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.
// Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  // Returns the next old space, or null after the last one.
  OldSpace* next();
  OldSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  // Candidate for the next space to return (an AllocationSpace value).
  int counter_;
};
1406
1407
// Space iterator for iterating over all the paged spaces of the heap:
// Map space, old pointer space, old data space, code space and cell space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  // Returns the next paged space, or null after the last one.
  PagedSpace* next();
  PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
 private:
  // Candidate for the next space to return (an AllocationSpace value).
  int counter_;
};
1418
1419
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // Returns whether there is another space to visit.
  bool has_next();
  // Returns an object iterator for the next space; the iterator is owned
  // by this SpaceIterator.
  ObjectIterator* next();

 private:
  // Creates the object iterator for the space identified by current_space_.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1437
1438
1439// A HeapIterator provides iteration over the whole heap It aggregates a the
1440// specific iterators for the different spaces as these can only iterate over
1441// one space only.
1442
1443class HeapIterator BASE_EMBEDDED {
1444 public:
1445 explicit HeapIterator();
1446 virtual ~HeapIterator();
1447
Steve Blocka7e24c12009-10-30 11:49:00 +00001448 HeapObject* next();
1449 void reset();
1450
1451 private:
1452 // Perform the initialization.
1453 void Init();
1454
1455 // Perform all necessary shutdown (destruction) work.
1456 void Shutdown();
1457
1458 // Space iterator for iterating all the spaces.
1459 SpaceIterator* space_iterator_;
1460 // Object iterator for the space currently being iterated.
1461 ObjectIterator* object_iterator_;
1462};
1463
1464
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // Number of entries; kLength is a power of two so kCapacityMask can be
  // used to wrap hash values into the table.
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 2;

 private:
  // Maps a (map, name) pair to an index into keys_/field_offsets_.
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  // A cache key; keys_[i] and field_offsets_[i] form one entry.
  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference uses keys_address()/field_offsets_address().
  friend class ExternalReference;
};
Steve Blocka7e24c12009-10-30 11:49:00 +00001504
1505
1506// Cache for mapping (array, property name) into descriptor index.
1507// The cache contains both positive and negative results.
1508// Descriptor index equals kNotFound means the property is absent.
1509// Cleared at startup and prior to any gc.
1510class DescriptorLookupCache {
1511 public:
1512 // Lookup descriptor index for (map, name).
1513 // If absent, kAbsent is returned.
1514 static int Lookup(DescriptorArray* array, String* name) {
1515 if (!StringShape(name).IsSymbol()) return kAbsent;
1516 int index = Hash(array, name);
1517 Key& key = keys_[index];
1518 if ((key.array == array) && (key.name == name)) return results_[index];
1519 return kAbsent;
1520 }
1521
1522 // Update an element in the cache.
1523 static void Update(DescriptorArray* array, String* name, int result) {
1524 ASSERT(result != kAbsent);
1525 if (StringShape(name).IsSymbol()) {
1526 int index = Hash(array, name);
1527 Key& key = keys_[index];
1528 key.array = array;
1529 key.name = name;
1530 results_[index] = result;
1531 }
1532 }
1533
1534 // Clear the cache.
1535 static void Clear();
1536
1537 static const int kAbsent = -2;
1538 private:
1539 static int Hash(DescriptorArray* array, String* name) {
1540 // Uses only lower 32 bits if pointers are larger.
Andrei Popescu402d9372010-02-26 13:31:12 +00001541 uint32_t array_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001542 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
Andrei Popescu402d9372010-02-26 13:31:12 +00001543 uint32_t name_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001544 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1545 return (array_hash ^ name_hash) % kLength;
1546 }
1547
1548 static const int kLength = 64;
1549 struct Key {
1550 DescriptorArray* array;
1551 String* name;
1552 };
1553
1554 static Key keys_[kLength];
1555 static int results_[kLength];
1556};
1557
1558
1559// ----------------------------------------------------------------------------
1560// Marking stack for tracing live objects.
1561
1562class MarkingStack {
1563 public:
1564 void Initialize(Address low, Address high) {
1565 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1566 high_ = reinterpret_cast<HeapObject**>(high);
1567 overflowed_ = false;
1568 }
1569
1570 bool is_full() { return top_ >= high_; }
1571
1572 bool is_empty() { return top_ <= low_; }
1573
1574 bool overflowed() { return overflowed_; }
1575
1576 void clear_overflowed() { overflowed_ = false; }
1577
1578 // Push the (marked) object on the marking stack if there is room,
1579 // otherwise mark the object as overflowed and wait for a rescan of the
1580 // heap.
1581 void Push(HeapObject* object) {
1582 CHECK(object->IsHeapObject());
1583 if (is_full()) {
1584 object->SetOverflow();
1585 overflowed_ = true;
1586 } else {
1587 *(top_++) = object;
1588 }
1589 }
1590
1591 HeapObject* Pop() {
1592 ASSERT(!is_empty());
1593 HeapObject* object = *(--top_);
1594 CHECK(object->IsHeapObject());
1595 return object;
1596 }
1597
1598 private:
1599 HeapObject** low_;
1600 HeapObject** top_;
1601 HeapObject** high_;
1602 bool overflowed_;
1603};
1604
1605
// A helper class to document/test C++ scopes where we do not
// expect a GC. Usage:
//
// /* Allocation not allowed: we cannot handle a GC in this scope. */
// { AssertNoAllocation nogc;
//   ...
// }

#ifdef DEBUG

// While alive, makes allocation failures fatal instead of returnable.
// Saves and restores Heap::disallow_allocation_failure_ (RAII).
class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = Heap::disallow_allocation_failure_;
    Heap::disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    Heap::disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;  // Previous flag value, restored on destruction.
};

// While alive, forbids allocation in this scope (debug builds only).
// Saves and restores the previous allow-allocation state (RAII).
class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = Heap::allow_allocation(false);
  }

  ~AssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Previous flag value, restored on destruction.
};

// While alive, re-enables allocation inside an AssertNoAllocation scope.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = Heap::allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Previous flag value, restored on destruction.
};

#else // ndef DEBUG

// Release-build stand-ins: same interface, no checking and no overhead.

class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};

class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};

#endif
1672
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  // RAII timer: adds the wall-clock time of its lifetime to the tracer's
  // per-scope accumulator identified by ScopeId.
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_COMPACT,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT((0 <= scope_) && (scope_ < kNumberOfScopes));
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;   // Tracer whose scopes_ entry is updated.
    ScopeId scope_;      // Which accumulator to charge.
    double start_time_;  // Timestamp (ms) taken in the constructor.
  };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

  // Adds the size of an object promoted during the current collection.
  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  // Returns maximum GC pause.
  static int get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  static int get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  static int get_min_in_mutator() { return min_in_mutator_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of object in heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  int start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, eg, the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects. Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared. Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC. Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of free lists
  // before the current GC.
  int in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  int allocated_since_last_gc_;

  // Amount of time spent in mutator that is time elapsed between end of the
  // previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  int promoted_objects_size_;

  // Maximum GC pause.
  static int max_gc_pause_;

  // Maximum size of objects alive after GC.
  static int max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  static int min_in_mutator_;

  // Size of objects alive after last GC.
  static int alive_after_last_gc_;

  // Timestamp (ms) of the end of the last GC.
  static double last_gc_end_timestamp_;
};
1809
1810
// Per-function cache of transcendental math results (acos, sin, exp, ...),
// keyed by the raw bit pattern of the double input. One lazily-created
// cache instance exists per Type.
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  static inline Object* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      // Lazily create the cache for this function on first use.
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers. This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Returns the cached heap number for 'input', computing and (on successful
  // allocation) caching it on a miss. On allocation failure the failure
  // object is returned and the cache entry is left unchanged.
  inline Object* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    // Hit requires both 32-bit halves of the input bit pattern to match.
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      Counters::transcendental_cache_hit.Increment();
      return e.output;
    }
    double answer = Calculate(input);
    Object* heap_number = Heap::AllocateHeapNumber(answer);
    if (!heap_number->IsFailure()) {
      elements_[hash].in[0] = c.integers[0];
      elements_[hash].in[1] = c.integers[1];
      elements_[hash].output = heap_number;
    }
    Counters::transcendental_cache_miss.Increment();
    return heap_number;
  }

  // Applies the math function selected by type_ to 'input'.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  static const int kCacheSize = 512;
  // A cache entry: the input's bit pattern and the cached heap number.
  struct Element {
    uint32_t in[2];
    Object* output;
  };
  // Union used to reinterpret a double as two 32-bit words for hashing.
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  // Mixes the two halves of the bit pattern into an index in [0, kCacheSize).
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= static_cast<int32_t>(hash) >> 16;
    hash ^= static_cast<int32_t>(hash) >> 8;
    return (hash & (kCacheSize - 1));
  }

  static Address cache_array_address() {
    // Used to create an external reference.
    return reinterpret_cast<Address>(caches_);
  }

  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;
  // Inline implementation of the caching.
  friend class TranscendentalCacheStub;

  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;  // Which math function this cache instance serves.
};
1906
1907
// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable : public AllStatic {
 public:
  // Registers an external string.
  inline static void AddString(String* string);

  // Visits all registered strings with the given visitor.
  inline static void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  static void CleanUp();

  // Destroys all allocated memory.
  static void TearDown();

 private:
  friend class Heap;

  // Checks internal invariants (debug helper).
  inline static void Verify();

  // Registers a string already known to live in old space.
  inline static void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline static void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space strings are kept
  // separate from old space strings.
  static List<Object*> new_space_strings_;
  static List<Object*> old_space_strings_;
};
1940
Steve Blocka7e24c12009-10-30 11:49:00 +00001941} } // namespace v8::internal
1942
1943#endif // V8_HEAP_H_