// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
Steve Block6ded16b2010-05-10 14:33:55 +010033#include "splay-tree-inl.h"
34#include "v8-counters.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
// Forward declarations.
class ZoneScopeInfo;
41
// Defines all the roots in Heap.  Each entry is V(type, field_name, CamelName)
// and clients instantiate the list by supplying their own V (X-macro pattern).
#define UNCONDITIONAL_STRONG_ROOT_LIST(V)                                      \
  /* Put the byte array map early.  We need it to be in place by the time */   \
  /* the deserializer hits the next page, since it wants to put a byte */      \
  /* array in the unused space at the end of the page. */                      \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top. */    \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Object, undefined_value, UndefinedValue)                                   \
  V(Object, the_hole_value, TheHoleValue)                                      \
  V(Object, null_value, NullValue)                                             \
  V(Object, true_value, TrueValue)                                             \
  V(Object, false_value, FalseValue)                                           \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, meta_map, MetaMap)                                                    \
  V(Object, termination_exception, TerminationException)                       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, pixel_array_map, PixelArrayMap)                                       \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, context_map, ContextMap)                                              \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, proxy_map, ProxyMap)                                                  \
  V(Object, nan_value, NanValue)                                               \
  V(Object, minus_zero_value, MinusZeroValue)                                  \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Proxy, prototype_accessors, PrototypeAccessors)                            \
  V(NumberDictionary, code_stubs, CodeStubs)                                   \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(Code, c_entry_code, CEntryCode)                                            \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)

// On ARM with the native (non-interpreted) RegExp engine, the RegExp C entry
// stub is an additional strong root; everywhere else the strong root list is
// exactly the unconditional one.
#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#define STRONG_ROOT_LIST(V)                                                    \
  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
120
// The full root list: all strong roots plus the symbol table, which is
// treated specially by the garbage collector (weakly for symbols).
#define ROOT_LIST(V)                                                           \
  STRONG_ROOT_LIST(V)                                                          \
  V(SymbolTable, symbol_table, SymbolTable)
124
// Defines the commonly-used, pre-interned symbols (strings).  Each entry is
// V(field_name, "literal contents"); clients instantiate the list by
// supplying their own V (X-macro pattern).
#define SYMBOL_LIST(V)                                                         \
  V(Array_symbol, "Array")                                                     \
  V(Object_symbol, "Object")                                                   \
  V(Proto_symbol, "__proto__")                                                 \
  V(StringImpl_symbol, "StringImpl")                                           \
  V(arguments_symbol, "arguments")                                             \
  V(Arguments_symbol, "Arguments")                                             \
  V(arguments_shadow_symbol, ".arguments")                                     \
  V(call_symbol, "call")                                                       \
  V(apply_symbol, "apply")                                                     \
  V(caller_symbol, "caller")                                                   \
  V(boolean_symbol, "boolean")                                                 \
  V(Boolean_symbol, "Boolean")                                                 \
  V(callee_symbol, "callee")                                                   \
  V(constructor_symbol, "constructor")                                         \
  V(code_symbol, ".code")                                                      \
  V(result_symbol, ".result")                                                  \
  V(catch_var_symbol, ".catch-var")                                            \
  V(empty_symbol, "")                                                          \
  V(eval_symbol, "eval")                                                       \
  V(function_symbol, "function")                                               \
  V(length_symbol, "length")                                                   \
  V(name_symbol, "name")                                                       \
  V(number_symbol, "number")                                                   \
  V(Number_symbol, "Number")                                                   \
  V(RegExp_symbol, "RegExp")                                                   \
  V(source_symbol, "source")                                                   \
  V(global_symbol, "global")                                                   \
  V(ignore_case_symbol, "ignoreCase")                                          \
  V(multiline_symbol, "multiline")                                             \
  V(input_symbol, "input")                                                     \
  V(index_symbol, "index")                                                     \
  V(last_index_symbol, "lastIndex")                                            \
  V(object_symbol, "object")                                                   \
  V(prototype_symbol, "prototype")                                             \
  V(string_symbol, "string")                                                   \
  V(String_symbol, "String")                                                   \
  V(Date_symbol, "Date")                                                       \
  V(this_symbol, "this")                                                       \
  V(to_string_symbol, "toString")                                              \
  V(char_at_symbol, "CharAt")                                                  \
  V(undefined_symbol, "undefined")                                             \
  V(value_of_symbol, "valueOf")                                                \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                         \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")                     \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                        \
  V(illegal_access_symbol, "illegal access")                                   \
  V(out_of_memory_symbol, "out-of-memory")                                     \
  V(illegal_execution_state_symbol, "illegal execution state")                 \
  V(get_symbol, "get")                                                         \
  V(set_symbol, "set")                                                         \
  V(function_class_symbol, "Function")                                         \
  V(illegal_argument_symbol, "illegal argument")                               \
  V(MakeReferenceError_symbol, "MakeReferenceError")                           \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                                 \
  V(MakeTypeError_symbol, "MakeTypeError")                                     \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")             \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")                     \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")             \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")               \
  V(illegal_return_symbol, "illegal_return")                                   \
  V(illegal_break_symbol, "illegal_break")                                     \
  V(illegal_continue_symbol, "illegal_continue")                               \
  V(unknown_label_symbol, "unknown_label")                                     \
  V(redeclaration_symbol, "redeclaration")                                     \
  V(failure_symbol, "<failure>")                                               \
  V(space_symbol, " ")                                                         \
  V(exec_symbol, "exec")                                                       \
  V(zero_symbol, "0")                                                          \
  V(global_eval_symbol, "GlobalEval")                                          \
  V(identity_hash_symbol, "v8::IdentityHash")                                  \
  V(closure_symbol, "(closure)")
Steve Blocka7e24c12009-10-30 11:49:00 +0000197
198
199// Forward declaration of the GCTracer class.
200class GCTracer;
Steve Blockd0582a62009-12-15 09:54:21 +0000201class HeapStats;
Steve Blocka7e24c12009-10-30 11:49:00 +0000202
203
Steve Block6ded16b2010-05-10 14:33:55 +0100204typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
205
206
Steve Blocka7e24c12009-10-30 11:49:00 +0000207// The all static Heap captures the interface to the global object heap.
208// All JavaScript contexts by this process share the same object heap.
209
210class Heap : public AllStatic {
211 public:
212 // Configure heap size before setup. Return false if the heap has been
213 // setup already.
Steve Block3ce2e202009-11-05 08:53:23 +0000214 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000215 static bool ConfigureHeapDefault();
216
217 // Initializes the global object heap. If create_heap_objects is true,
218 // also creates the basic non-mutable objects.
219 // Returns whether it succeeded.
220 static bool Setup(bool create_heap_objects);
221
222 // Destroys all memory allocated by the heap.
223 static void TearDown();
224
Steve Blockd0582a62009-12-15 09:54:21 +0000225 // Set the stack limit in the roots_ array. Some architectures generate
226 // code that looks here, because it is faster than loading from the static
227 // jslimit_/real_jslimit_ variable in the StackGuard.
228 static void SetStackLimits();
Steve Blocka7e24c12009-10-30 11:49:00 +0000229
230 // Returns whether Setup has been called.
231 static bool HasBeenSetup();
232
Steve Block3ce2e202009-11-05 08:53:23 +0000233 // Returns the maximum amount of memory reserved for the heap. For
234 // the young generation, we reserve 4 times the amount needed for a
235 // semi space. The young generation consists of two semi spaces and
236 // we reserve twice the amount needed for those in order to ensure
237 // that new space can be aligned to its size.
238 static int MaxReserved() {
239 return 4 * reserved_semispace_size_ + max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000240 }
Steve Block3ce2e202009-11-05 08:53:23 +0000241 static int MaxSemiSpaceSize() { return max_semispace_size_; }
242 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000243 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
Steve Block3ce2e202009-11-05 08:53:23 +0000244 static int MaxOldGenerationSize() { return max_old_generation_size_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000245
246 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
247 // more spaces are needed until it reaches the limit.
248 static int Capacity();
249
Steve Block3ce2e202009-11-05 08:53:23 +0000250 // Returns the amount of memory currently committed for the heap.
251 static int CommittedMemory();
252
Steve Blocka7e24c12009-10-30 11:49:00 +0000253 // Returns the available bytes in space w/o growing.
254 // Heap doesn't guarantee that it can allocate an object that requires
255 // all available bytes. Check MaxHeapObjectSize() instead.
256 static int Available();
257
258 // Returns the maximum object size in paged space.
259 static inline int MaxObjectSizeInPagedSpace();
260
261 // Returns of size of all objects residing in the heap.
262 static int SizeOfObjects();
263
264 // Return the starting address and a mask for the new space. And-masking an
265 // address with the mask will result in the start address of the new space
266 // for all addresses in either semispace.
267 static Address NewSpaceStart() { return new_space_.start(); }
268 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
269 static Address NewSpaceTop() { return new_space_.top(); }
270
271 static NewSpace* new_space() { return &new_space_; }
272 static OldSpace* old_pointer_space() { return old_pointer_space_; }
273 static OldSpace* old_data_space() { return old_data_space_; }
274 static OldSpace* code_space() { return code_space_; }
275 static MapSpace* map_space() { return map_space_; }
276 static CellSpace* cell_space() { return cell_space_; }
277 static LargeObjectSpace* lo_space() { return lo_space_; }
278
279 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
280 static Address always_allocate_scope_depth_address() {
281 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
282 }
Steve Blockd0582a62009-12-15 09:54:21 +0000283 static bool linear_allocation() {
Leon Clarkee46be812010-01-19 14:06:41 +0000284 return linear_allocation_scope_depth_ != 0;
Steve Blockd0582a62009-12-15 09:54:21 +0000285 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000286
287 static Address* NewSpaceAllocationTopAddress() {
288 return new_space_.allocation_top_address();
289 }
290 static Address* NewSpaceAllocationLimitAddress() {
291 return new_space_.allocation_limit_address();
292 }
293
294 // Uncommit unused semi space.
295 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
296
297#ifdef ENABLE_HEAP_PROTECTION
298 // Protect/unprotect the heap by marking all spaces read-only/writable.
299 static void Protect();
300 static void Unprotect();
301#endif
302
303 // Allocates and initializes a new JavaScript object based on a
304 // constructor.
305 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
306 // failed.
307 // Please note this does not perform a garbage collection.
308 static Object* AllocateJSObject(JSFunction* constructor,
309 PretenureFlag pretenure = NOT_TENURED);
310
311 // Allocates and initializes a new global object based on a constructor.
312 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
313 // failed.
314 // Please note this does not perform a garbage collection.
315 static Object* AllocateGlobalObject(JSFunction* constructor);
316
317 // Returns a deep copy of the JavaScript object.
318 // Properties and elements are copied too.
319 // Returns failure if allocation failed.
320 static Object* CopyJSObject(JSObject* source);
321
322 // Allocates the function prototype.
323 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
324 // failed.
325 // Please note this does not perform a garbage collection.
326 static Object* AllocateFunctionPrototype(JSFunction* function);
327
328 // Reinitialize an JSGlobalProxy based on a constructor. The object
329 // must have the same size as objects allocated using the
330 // constructor. The object is reinitialized and behaves as an
331 // object that has been freshly allocated using the constructor.
332 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
333 JSGlobalProxy* global);
334
335 // Allocates and initializes a new JavaScript object based on a map.
336 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
337 // failed.
338 // Please note this does not perform a garbage collection.
339 static Object* AllocateJSObjectFromMap(Map* map,
340 PretenureFlag pretenure = NOT_TENURED);
341
342 // Allocates a heap object based on the map.
343 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
344 // failed.
345 // Please note this function does not perform a garbage collection.
346 static Object* Allocate(Map* map, AllocationSpace space);
347
348 // Allocates a JS Map in the heap.
349 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
350 // failed.
351 // Please note this function does not perform a garbage collection.
352 static Object* AllocateMap(InstanceType instance_type, int instance_size);
353
354 // Allocates a partial map for bootstrapping.
355 static Object* AllocatePartialMap(InstanceType instance_type,
356 int instance_size);
357
358 // Allocate a map for the specified function
359 static Object* AllocateInitialMap(JSFunction* fun);
360
Steve Block6ded16b2010-05-10 14:33:55 +0100361 // Allocates an empty code cache.
362 static Object* AllocateCodeCache();
363
Steve Blocka7e24c12009-10-30 11:49:00 +0000364 // Allocates and fully initializes a String. There are two String
365 // encodings: ASCII and two byte. One should choose between the three string
366 // allocation functions based on the encoding of the string buffer used to
367 // initialized the string.
368 // - ...FromAscii initializes the string from a buffer that is ASCII
369 // encoded (it does not check that the buffer is ASCII encoded) and the
370 // result will be ASCII encoded.
371 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
372 // encoded. If the characters are all single-byte characters, the
373 // result will be ASCII encoded, otherwise it will converted to two
374 // byte.
375 // - ...FromTwoByte initializes the string from a buffer that is two-byte
376 // encoded. If the characters are all single-byte characters, the
377 // result will be converted to ASCII, otherwise it will be left as
378 // two-byte.
379 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
380 // failed.
381 // Please note this does not perform a garbage collection.
382 static Object* AllocateStringFromAscii(
383 Vector<const char> str,
384 PretenureFlag pretenure = NOT_TENURED);
385 static Object* AllocateStringFromUtf8(
386 Vector<const char> str,
387 PretenureFlag pretenure = NOT_TENURED);
388 static Object* AllocateStringFromTwoByte(
389 Vector<const uc16> str,
390 PretenureFlag pretenure = NOT_TENURED);
391
392 // Allocates a symbol in old space based on the character stream.
393 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
394 // failed.
395 // Please note this function does not perform a garbage collection.
396 static inline Object* AllocateSymbol(Vector<const char> str,
397 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000398 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000399
400 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
401 int chars,
Steve Blockd0582a62009-12-15 09:54:21 +0000402 uint32_t hash_field);
Steve Blocka7e24c12009-10-30 11:49:00 +0000403
404 static Object* AllocateExternalSymbol(Vector<const char> str,
405 int chars);
406
407
408 // Allocates and partially initializes a String. There are two String
409 // encodings: ASCII and two byte. These functions allocate a string of the
410 // given length and set its map and length fields. The characters of the
411 // string are uninitialized.
412 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
413 // failed.
414 // Please note this does not perform a garbage collection.
415 static Object* AllocateRawAsciiString(
416 int length,
417 PretenureFlag pretenure = NOT_TENURED);
418 static Object* AllocateRawTwoByteString(
419 int length,
420 PretenureFlag pretenure = NOT_TENURED);
421
422 // Computes a single character string where the character has code.
423 // A cache is used for ascii codes.
424 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
425 // failed. Please note this does not perform a garbage collection.
426 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
427
428 // Allocate a byte array of the specified length
429 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
430 // failed.
431 // Please note this does not perform a garbage collection.
432 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
433
434 // Allocate a non-tenured byte array of the specified length
435 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
436 // failed.
437 // Please note this does not perform a garbage collection.
438 static Object* AllocateByteArray(int length);
439
440 // Allocate a pixel array of the specified length
441 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
442 // failed.
443 // Please note this does not perform a garbage collection.
444 static Object* AllocatePixelArray(int length,
445 uint8_t* external_pointer,
446 PretenureFlag pretenure);
447
Steve Block3ce2e202009-11-05 08:53:23 +0000448 // Allocates an external array of the specified length and type.
449 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
450 // failed.
451 // Please note this does not perform a garbage collection.
452 static Object* AllocateExternalArray(int length,
453 ExternalArrayType array_type,
454 void* external_pointer,
455 PretenureFlag pretenure);
456
Steve Blocka7e24c12009-10-30 11:49:00 +0000457 // Allocate a tenured JS global property cell.
458 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
459 // failed.
460 // Please note this does not perform a garbage collection.
461 static Object* AllocateJSGlobalPropertyCell(Object* value);
462
463 // Allocates a fixed array initialized with undefined values
464 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
465 // failed.
466 // Please note this does not perform a garbage collection.
467 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
Steve Block6ded16b2010-05-10 14:33:55 +0100468 // Allocates a fixed array initialized with undefined values
Steve Blocka7e24c12009-10-30 11:49:00 +0000469 static Object* AllocateFixedArray(int length);
470
Steve Block6ded16b2010-05-10 14:33:55 +0100471 // Allocates an uninitialized fixed array. It must be filled by the caller.
472 //
473 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
474 // failed.
475 // Please note this does not perform a garbage collection.
476 static Object* AllocateUninitializedFixedArray(int length);
477
Steve Blocka7e24c12009-10-30 11:49:00 +0000478 // Make a copy of src and return it. Returns
479 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
480 static Object* CopyFixedArray(FixedArray* src);
481
482 // Allocates a fixed array initialized with the hole values.
483 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
484 // failed.
485 // Please note this does not perform a garbage collection.
Steve Block6ded16b2010-05-10 14:33:55 +0100486 static Object* AllocateFixedArrayWithHoles(
487 int length,
488 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000489
490 // AllocateHashTable is identical to AllocateFixedArray except
491 // that the resulting object has hash_table_map as map.
Steve Block6ded16b2010-05-10 14:33:55 +0100492 static Object* AllocateHashTable(int length,
493 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000494
495 // Allocate a global (but otherwise uninitialized) context.
496 static Object* AllocateGlobalContext();
497
498 // Allocate a function context.
499 static Object* AllocateFunctionContext(int length, JSFunction* closure);
500
501 // Allocate a 'with' context.
502 static Object* AllocateWithContext(Context* previous,
503 JSObject* extension,
504 bool is_catch_context);
505
506 // Allocates a new utility object in the old generation.
507 static Object* AllocateStruct(InstanceType type);
508
509 // Allocates a function initialized with a shared part.
510 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
511 // failed.
512 // Please note this does not perform a garbage collection.
513 static Object* AllocateFunction(Map* function_map,
514 SharedFunctionInfo* shared,
Leon Clarkee46be812010-01-19 14:06:41 +0000515 Object* prototype,
516 PretenureFlag pretenure = TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000517
518 // Indicies for direct access into argument objects.
Leon Clarkee46be812010-01-19 14:06:41 +0000519 static const int kArgumentsObjectSize =
520 JSObject::kHeaderSize + 2 * kPointerSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000521 static const int arguments_callee_index = 0;
522 static const int arguments_length_index = 1;
523
524 // Allocates an arguments object - optionally with an elements array.
525 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
526 // failed.
527 // Please note this does not perform a garbage collection.
528 static Object* AllocateArgumentsObject(Object* callee, int length);
529
Steve Blocka7e24c12009-10-30 11:49:00 +0000530 // Same as NewNumberFromDouble, but may return a preallocated/immutable
531 // number object (e.g., minus_zero_value_, nan_value_)
532 static Object* NumberFromDouble(double value,
533 PretenureFlag pretenure = NOT_TENURED);
534
535 // Allocated a HeapNumber from value.
536 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
537 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
538
539 // Converts an int into either a Smi or a HeapNumber object.
540 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
541 // failed.
542 // Please note this does not perform a garbage collection.
543 static inline Object* NumberFromInt32(int32_t value);
544
545 // Converts an int into either a Smi or a HeapNumber object.
546 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
547 // failed.
548 // Please note this does not perform a garbage collection.
549 static inline Object* NumberFromUint32(uint32_t value);
550
551 // Allocates a new proxy object.
552 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
553 // failed.
554 // Please note this does not perform a garbage collection.
555 static Object* AllocateProxy(Address proxy,
556 PretenureFlag pretenure = NOT_TENURED);
557
558 // Allocates a new SharedFunctionInfo object.
559 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
560 // failed.
561 // Please note this does not perform a garbage collection.
562 static Object* AllocateSharedFunctionInfo(Object* name);
563
564 // Allocates a new cons string object.
565 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
566 // failed.
567 // Please note this does not perform a garbage collection.
568 static Object* AllocateConsString(String* first, String* second);
569
Steve Blocka7e24c12009-10-30 11:49:00 +0000570 // Allocates a new sub string object which is a substring of an underlying
571 // string buffer stretching from the index start (inclusive) to the index
572 // end (exclusive).
573 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
574 // failed.
575 // Please note this does not perform a garbage collection.
576 static Object* AllocateSubString(String* buffer,
577 int start,
Steve Block6ded16b2010-05-10 14:33:55 +0100578 int end,
579 PretenureFlag pretenure = NOT_TENURED);
Steve Blocka7e24c12009-10-30 11:49:00 +0000580
581 // Allocate a new external string object, which is backed by a string
582 // resource that resides outside the V8 heap.
583 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
584 // failed.
585 // Please note this does not perform a garbage collection.
586 static Object* AllocateExternalStringFromAscii(
587 ExternalAsciiString::Resource* resource);
588 static Object* AllocateExternalStringFromTwoByte(
589 ExternalTwoByteString::Resource* resource);
590
Leon Clarkee46be812010-01-19 14:06:41 +0000591 // Finalizes an external string by deleting the associated external
592 // data and clearing the resource pointer.
593 static inline void FinalizeExternalString(String* string);
594
Steve Blocka7e24c12009-10-30 11:49:00 +0000595 // Allocates an uninitialized object. The memory is non-executable if the
596 // hardware and OS allow.
597 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
598 // failed.
599 // Please note this function does not perform a garbage collection.
600 static inline Object* AllocateRaw(int size_in_bytes,
601 AllocationSpace space,
602 AllocationSpace retry_space);
603
604 // Initialize a filler object to keep the ability to iterate over the heap
605 // when shortening objects.
606 static void CreateFillerObjectAt(Address addr, int size);
607
608 // Makes a new native code object
609 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
610 // failed. On success, the pointer to the Code object is stored in the
611 // self_reference. This allows generated code to reference its own Code
612 // object by containing this pointer.
613 // Please note this function does not perform a garbage collection.
614 static Object* CreateCode(const CodeDesc& desc,
615 ZoneScopeInfo* sinfo,
616 Code::Flags flags,
617 Handle<Object> self_reference);
618
619 static Object* CopyCode(Code* code);
Steve Block6ded16b2010-05-10 14:33:55 +0100620
621 // Copy the code and scope info part of the code object, but insert
622 // the provided data as the relocation information.
623 static Object* CopyCode(Code* code, Vector<byte> reloc_info);
624
Steve Blocka7e24c12009-10-30 11:49:00 +0000625 // Finds the symbol for string in the symbol table.
626 // If not found, a new symbol is added to the table and returned.
627 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
628 // failed.
629 // Please note this function does not perform a garbage collection.
630 static Object* LookupSymbol(Vector<const char> str);
631 static Object* LookupAsciiSymbol(const char* str) {
632 return LookupSymbol(CStrVector(str));
633 }
634 static Object* LookupSymbol(String* str);
635 static bool LookupSymbolIfExists(String* str, String** symbol);
Steve Blockd0582a62009-12-15 09:54:21 +0000636 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
Steve Blocka7e24c12009-10-30 11:49:00 +0000637
638 // Compute the matching symbol map for a string if possible.
639 // NULL is returned if string is in new space or not flattened.
640 static Map* SymbolMapForString(String* str);
641
Steve Block6ded16b2010-05-10 14:33:55 +0100642 // Tries to flatten a string before compare operation.
643 //
644 // Returns a failure in case it was decided that flattening was
645 // necessary and failed. Note, if flattening is not necessary the
646 // string might stay non-flat even when not a failure is returned.
647 //
648 // Please note this function does not perform a garbage collection.
649 static inline Object* PrepareForCompare(String* str);
650
Steve Blocka7e24c12009-10-30 11:49:00 +0000651 // Converts the given boolean condition to JavaScript boolean value.
652 static Object* ToBoolean(bool condition) {
653 return condition ? true_value() : false_value();
654 }
655
656 // Code that should be run before and after each GC. Includes some
657 // reporting/verification activities when compiled with DEBUG set.
658 static void GarbageCollectionPrologue();
659 static void GarbageCollectionEpilogue();
660
Steve Blocka7e24c12009-10-30 11:49:00 +0000661 // Performs garbage collection operation.
662 // Returns whether required_space bytes are available after the collection.
663 static bool CollectGarbage(int required_space, AllocationSpace space);
664
665 // Performs a full garbage collection. Force compaction if the
666 // parameter is true.
667 static void CollectAllGarbage(bool force_compaction);
668
Steve Blocka7e24c12009-10-30 11:49:00 +0000669 // Notify the heap that a context has been disposed.
Steve Block6ded16b2010-05-10 14:33:55 +0100670 static int NotifyContextDisposed() { return ++contexts_disposed_; }
Steve Blocka7e24c12009-10-30 11:49:00 +0000671
672 // Utility to invoke the scavenger. This is needed in test code to
673 // ensure correct callback for weak global handles.
674 static void PerformScavenge();
675
676#ifdef DEBUG
677 // Utility used with flag gc-greedy.
678 static bool GarbageCollectionGreedyCheck();
679#endif
680
Steve Block6ded16b2010-05-10 14:33:55 +0100681 static void AddGCPrologueCallback(
682 GCEpilogueCallback callback, GCType gc_type_filter);
683 static void RemoveGCPrologueCallback(GCEpilogueCallback callback);
684
685 static void AddGCEpilogueCallback(
686 GCEpilogueCallback callback, GCType gc_type_filter);
687 static void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
688
Steve Blocka7e24c12009-10-30 11:49:00 +0000689 static void SetGlobalGCPrologueCallback(GCCallback callback) {
Steve Block6ded16b2010-05-10 14:33:55 +0100690 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
Steve Blocka7e24c12009-10-30 11:49:00 +0000691 global_gc_prologue_callback_ = callback;
692 }
693 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
Steve Block6ded16b2010-05-10 14:33:55 +0100694 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
Steve Blocka7e24c12009-10-30 11:49:00 +0000695 global_gc_epilogue_callback_ = callback;
696 }
697
698 // Heap root getters. We have versions with and without type::cast() here.
699 // You can't use type::cast during GC because the assert fails.
700#define ROOT_ACCESSOR(type, name, camel_name) \
701 static inline type* name() { \
702 return type::cast(roots_[k##camel_name##RootIndex]); \
703 } \
704 static inline type* raw_unchecked_##name() { \
705 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
706 }
707 ROOT_LIST(ROOT_ACCESSOR)
708#undef ROOT_ACCESSOR
709
710// Utility type maps
711#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
712 static inline Map* name##_map() { \
713 return Map::cast(roots_[k##Name##MapRootIndex]); \
714 }
715 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
716#undef STRUCT_MAP_ACCESSOR
717
718#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
719 return String::cast(roots_[k##name##RootIndex]); \
720 }
721 SYMBOL_LIST(SYMBOL_ACCESSOR)
722#undef SYMBOL_ACCESSOR
723
724 // The hidden_symbol is special because it is the empty string, but does
725 // not match the empty string.
726 static String* hidden_symbol() { return hidden_symbol_; }
727
728 // Iterates over all roots in the heap.
Steve Blockd0582a62009-12-15 09:54:21 +0000729 static void IterateRoots(ObjectVisitor* v, VisitMode mode);
Steve Blocka7e24c12009-10-30 11:49:00 +0000730 // Iterates over all strong roots in the heap.
Steve Blockd0582a62009-12-15 09:54:21 +0000731 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
Leon Clarked91b9f72010-01-27 17:25:45 +0000732 // Iterates over all the other roots in the heap.
733 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
Steve Blocka7e24c12009-10-30 11:49:00 +0000734
735 // Iterates remembered set of an old space.
736 static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
737
738 // Iterates a range of remembered set addresses starting with rset_start
739 // corresponding to the range of allocated pointers
740 // [object_start, object_end).
741 // Returns the number of bits that were set.
742 static int IterateRSetRange(Address object_start,
743 Address object_end,
744 Address rset_start,
745 ObjectSlotCallback copy_object_func);
746
747 // Returns whether the object resides in new space.
748 static inline bool InNewSpace(Object* object);
749 static inline bool InFromSpace(Object* object);
750 static inline bool InToSpace(Object* object);
751
752 // Checks whether an address/object in the heap (including auxiliary
753 // area and unused area).
754 static bool Contains(Address addr);
755 static bool Contains(HeapObject* value);
756
757 // Checks whether an address/object in a space.
Steve Blockd0582a62009-12-15 09:54:21 +0000758 // Currently used by tests, serialization and heap verification only.
Steve Blocka7e24c12009-10-30 11:49:00 +0000759 static bool InSpace(Address addr, AllocationSpace space);
760 static bool InSpace(HeapObject* value, AllocationSpace space);
761
762 // Finds out which space an object should get promoted to based on its type.
763 static inline OldSpace* TargetSpace(HeapObject* object);
764 static inline AllocationSpace TargetSpaceId(InstanceType type);
765
766 // Sets the stub_cache_ (only used when expanding the dictionary).
767 static void public_set_code_stubs(NumberDictionary* value) {
768 roots_[kCodeStubsRootIndex] = value;
769 }
770
771 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
772 static void public_set_non_monomorphic_cache(NumberDictionary* value) {
773 roots_[kNonMonomorphicCacheRootIndex] = value;
774 }
775
Andrei Popescu31002712010-02-23 13:46:05 +0000776 static void public_set_empty_script(Script* script) {
777 roots_[kEmptyScriptRootIndex] = script;
778 }
779
Steve Blocka7e24c12009-10-30 11:49:00 +0000780 // Update the next script id.
781 static inline void SetLastScriptId(Object* last_script_id);
782
783 // Generated code can embed this address to get access to the roots.
784 static Object** roots_address() { return roots_; }
785
786#ifdef DEBUG
787 static void Print();
788 static void PrintHandles();
789
790 // Verify the heap is in its normal state before or after a GC.
791 static void Verify();
792
793 // Report heap statistics.
794 static void ReportHeapStatistics(const char* title);
795 static void ReportCodeStatistics(const char* title);
796
797 // Fill in bogus values in from space
798 static void ZapFromSpace();
799#endif
800
801#if defined(ENABLE_LOGGING_AND_PROFILING)
802 // Print short heap statistics.
803 static void PrintShortHeapStatistics();
804#endif
805
806 // Makes a new symbol object
807 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
808 // failed.
809 // Please note this function does not perform a garbage collection.
810 static Object* CreateSymbol(const char* str, int length, int hash);
811 static Object* CreateSymbol(String* str);
812
813 // Write barrier support for address[offset] = o.
814 static inline void RecordWrite(Address address, int offset);
815
Steve Block6ded16b2010-05-10 14:33:55 +0100816 // Write barrier support for address[start : start + len[ = o.
817 static inline void RecordWrites(Address address, int start, int len);
818
Steve Blocka7e24c12009-10-30 11:49:00 +0000819 // Given an address occupied by a live code object, return that object.
820 static Object* FindCodeObject(Address a);
821
822 // Invoke Shrink on shrinkable spaces.
823 static void Shrink();
824
825 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
826 static inline HeapState gc_state() { return gc_state_; }
827
828#ifdef DEBUG
829 static bool IsAllocationAllowed() { return allocation_allowed_; }
830 static inline bool allow_allocation(bool enable);
831
832 static bool disallow_allocation_failure() {
833 return disallow_allocation_failure_;
834 }
835
Leon Clarkee46be812010-01-19 14:06:41 +0000836 static void TracePathToObject(Object* target);
Steve Blocka7e24c12009-10-30 11:49:00 +0000837 static void TracePathToGlobal();
838#endif
839
840 // Callback function passed to Heap::Iterate etc. Copies an object if
841 // necessary, the object might be promoted to an old space. The caller must
842 // ensure the precondition that the object is (a) a heap object and (b) in
843 // the heap's from space.
844 static void ScavengePointer(HeapObject** p);
845 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
846
847 // Clear a range of remembered set addresses corresponding to the object
848 // area address 'start' with size 'size_in_bytes', eg, when adding blocks
849 // to the free list.
850 static void ClearRSetRange(Address start, int size_in_bytes);
851
852 // Rebuild remembered set in old and map spaces.
853 static void RebuildRSets();
854
Leon Clarkee46be812010-01-19 14:06:41 +0000855 // Update an old object's remembered set
856 static int UpdateRSet(HeapObject* obj);
857
Steve Blocka7e24c12009-10-30 11:49:00 +0000858 // Commits from space if it is uncommitted.
859 static void EnsureFromSpaceIsCommitted();
860
Leon Clarkee46be812010-01-19 14:06:41 +0000861 // Support for partial snapshots. After calling this we can allocate a
862 // certain number of bytes using only linear allocation (with a
863 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
  // or causing a GC. It returns true if space was reserved or false if a GC is
865 // needed. For paged spaces the space requested must include the space wasted
866 // at the end of each page when allocating linearly.
867 static void ReserveSpace(
868 int new_space_size,
869 int pointer_space_size,
870 int data_space_size,
871 int code_space_size,
872 int map_space_size,
873 int cell_space_size,
874 int large_object_size);
875
Steve Blocka7e24c12009-10-30 11:49:00 +0000876 //
877 // Support for the API.
878 //
879
880 static bool CreateApiObjects();
881
  // Attempt to find the number in a small cache. If we find it, return
883 // the string representation of the number. Otherwise return undefined.
884 static Object* GetNumberStringCache(Object* number);
885
886 // Update the cache with a new number-string pair.
887 static void SetNumberStringCache(Object* number, String* str);
888
Steve Blocka7e24c12009-10-30 11:49:00 +0000889 // Adjusts the amount of registered external memory.
890 // Returns the adjusted value.
891 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
892
Steve Block6ded16b2010-05-10 14:33:55 +0100893 // Allocate uninitialized fixed array.
Steve Blocka7e24c12009-10-30 11:49:00 +0000894 static Object* AllocateRawFixedArray(int length);
Steve Block6ded16b2010-05-10 14:33:55 +0100895 static Object* AllocateRawFixedArray(int length,
896 PretenureFlag pretenure);
Steve Blocka7e24c12009-10-30 11:49:00 +0000897
898 // True if we have reached the allocation limit in the old generation that
899 // should force the next GC (caused normally) to be a full one.
900 static bool OldGenerationPromotionLimitReached() {
901 return (PromotedSpaceSize() + PromotedExternalMemorySize())
902 > old_gen_promotion_limit_;
903 }
904
Leon Clarkee46be812010-01-19 14:06:41 +0000905 static intptr_t OldGenerationSpaceAvailable() {
906 return old_gen_allocation_limit_ -
907 (PromotedSpaceSize() + PromotedExternalMemorySize());
908 }
909
Steve Blocka7e24c12009-10-30 11:49:00 +0000910 // True if we have reached the allocation limit in the old generation that
911 // should artificially cause a GC right now.
912 static bool OldGenerationAllocationLimitReached() {
Leon Clarkee46be812010-01-19 14:06:41 +0000913 return OldGenerationSpaceAvailable() < 0;
Steve Blocka7e24c12009-10-30 11:49:00 +0000914 }
915
916 // Can be called when the embedding application is idle.
917 static bool IdleNotification();
918
919 // Declare all the root indices.
920 enum RootListIndex {
921#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
922 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
923#undef ROOT_INDEX_DECLARATION
924
925// Utility type maps
926#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
927 STRUCT_LIST(DECLARE_STRUCT_MAP)
928#undef DECLARE_STRUCT_MAP
929
930#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
931 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
932#undef SYMBOL_DECLARATION
933
934 kSymbolTableRootIndex,
935 kStrongRootListLength = kSymbolTableRootIndex,
936 kRootListLength
937 };
938
Steve Block6ded16b2010-05-10 14:33:55 +0100939 static Object* NumberToString(Object* number,
940 bool check_number_string_cache = true);
Steve Blocka7e24c12009-10-30 11:49:00 +0000941
Steve Block3ce2e202009-11-05 08:53:23 +0000942 static Map* MapForExternalArrayType(ExternalArrayType array_type);
943 static RootListIndex RootIndexForExternalArrayType(
944 ExternalArrayType array_type);
945
Steve Blockd0582a62009-12-15 09:54:21 +0000946 static void RecordStats(HeapStats* stats);
947
Steve Block6ded16b2010-05-10 14:33:55 +0100948 // Copy block of memory from src to dst. Size of block should be aligned
949 // by pointer size.
950 static inline void CopyBlock(Object** dst, Object** src, int byte_size);
951
952 // Optimized version of memmove for blocks with pointer size aligned sizes and
953 // pointer size aligned addresses.
954 static inline void MoveBlock(Object** dst, Object** src, int byte_size);
955
956 // Check new space expansion criteria and expand semispaces if it was hit.
957 static void CheckNewSpaceExpansionCriteria();
958
959 static inline void IncrementYoungSurvivorsCounter(int survived) {
960 survived_since_last_expansion_ += survived;
961 }
962
963 static void UpdateNewSpaceReferencesInExternalStringTable(
964 ExternalStringTableUpdaterCallback updater_func);
965
966 // Helper function that governs the promotion policy from new space to
967 // old. If the object's old address lies below the new space's age
968 // mark or if we've already filled the bottom 1/16th of the to space,
969 // we try to promote this object.
970 static inline bool ShouldBePromoted(Address old_address, int object_size);
971
972 static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
973
Steve Blocka7e24c12009-10-30 11:49:00 +0000974 private:
Steve Block3ce2e202009-11-05 08:53:23 +0000975 static int reserved_semispace_size_;
976 static int max_semispace_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000977 static int initial_semispace_size_;
Steve Block3ce2e202009-11-05 08:53:23 +0000978 static int max_old_generation_size_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000979 static size_t code_range_size_;
980
981 // For keeping track of how much data has survived
982 // scavenge since last new space expansion.
983 static int survived_since_last_expansion_;
984
985 static int always_allocate_scope_depth_;
Steve Blockd0582a62009-12-15 09:54:21 +0000986 static int linear_allocation_scope_depth_;
Steve Block6ded16b2010-05-10 14:33:55 +0100987
988 // For keeping track of context disposals.
989 static int contexts_disposed_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000990
Steve Blocka7e24c12009-10-30 11:49:00 +0000991#if defined(V8_TARGET_ARCH_X64)
992 static const int kMaxObjectSizeInNewSpace = 512*KB;
993#else
994 static const int kMaxObjectSizeInNewSpace = 256*KB;
995#endif
996
997 static NewSpace new_space_;
998 static OldSpace* old_pointer_space_;
999 static OldSpace* old_data_space_;
1000 static OldSpace* code_space_;
1001 static MapSpace* map_space_;
1002 static CellSpace* cell_space_;
1003 static LargeObjectSpace* lo_space_;
1004 static HeapState gc_state_;
1005
1006 // Returns the size of object residing in non new spaces.
1007 static int PromotedSpaceSize();
1008
1009 // Returns the amount of external memory registered since last global gc.
1010 static int PromotedExternalMemorySize();
1011
1012 static int mc_count_; // how many mark-compact collections happened
1013 static int gc_count_; // how many gc happened
1014
Steve Block6ded16b2010-05-10 14:33:55 +01001015 // Total length of the strings we failed to flatten since the last GC.
1016 static int unflattened_strings_length_;
1017
Steve Blocka7e24c12009-10-30 11:49:00 +00001018#define ROOT_ACCESSOR(type, name, camel_name) \
1019 static inline void set_##name(type* value) { \
1020 roots_[k##camel_name##RootIndex] = value; \
1021 }
1022 ROOT_LIST(ROOT_ACCESSOR)
1023#undef ROOT_ACCESSOR
1024
1025#ifdef DEBUG
1026 static bool allocation_allowed_;
1027
1028 // If the --gc-interval flag is set to a positive value, this
1029 // variable holds the value indicating the number of allocations
1030 // remain until the next failure and garbage collection.
1031 static int allocation_timeout_;
1032
1033 // Do we expect to be able to handle allocation failure at this
1034 // time?
1035 static bool disallow_allocation_failure_;
1036#endif // DEBUG
1037
1038 // Limit that triggers a global GC on the next (normally caused) GC. This
1039 // is checked when we have already decided to do a GC to help determine
1040 // which collector to invoke.
1041 static int old_gen_promotion_limit_;
1042
1043 // Limit that triggers a global GC as soon as is reasonable. This is
1044 // checked before expanding a paged space in the old generation and on
1045 // every allocation in large object space.
1046 static int old_gen_allocation_limit_;
1047
1048 // Limit on the amount of externally allocated memory allowed
1049 // between global GCs. If reached a global GC is forced.
1050 static int external_allocation_limit_;
1051
1052 // The amount of external memory registered through the API kept alive
1053 // by global handles
1054 static int amount_of_external_allocated_memory_;
1055
1056 // Caches the amount of external memory registered at the last global gc.
1057 static int amount_of_external_allocated_memory_at_last_global_gc_;
1058
1059 // Indicates that an allocation has failed in the old generation since the
1060 // last GC.
1061 static int old_gen_exhausted_;
1062
1063 static Object* roots_[kRootListLength];
1064
1065 struct StringTypeTable {
1066 InstanceType type;
1067 int size;
1068 RootListIndex index;
1069 };
1070
1071 struct ConstantSymbolTable {
1072 const char* contents;
1073 RootListIndex index;
1074 };
1075
1076 struct StructTable {
1077 InstanceType type;
1078 int size;
1079 RootListIndex index;
1080 };
1081
1082 static const StringTypeTable string_type_table[];
1083 static const ConstantSymbolTable constant_symbol_table[];
1084 static const StructTable struct_table[];
1085
1086 // The special hidden symbol which is an empty string, but does not match
1087 // any string when looked up in properties.
1088 static String* hidden_symbol_;
1089
1090 // GC callback function, called before and after mark-compact GC.
1091 // Allocations in the callback function are disallowed.
Steve Block6ded16b2010-05-10 14:33:55 +01001092 struct GCPrologueCallbackPair {
1093 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1094 : callback(callback), gc_type(gc_type) {
1095 }
1096 bool operator==(const GCPrologueCallbackPair& pair) const {
1097 return pair.callback == callback;
1098 }
1099 GCPrologueCallback callback;
1100 GCType gc_type;
1101 };
1102 static List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1103
1104 struct GCEpilogueCallbackPair {
1105 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1106 : callback(callback), gc_type(gc_type) {
1107 }
1108 bool operator==(const GCEpilogueCallbackPair& pair) const {
1109 return pair.callback == callback;
1110 }
1111 GCEpilogueCallback callback;
1112 GCType gc_type;
1113 };
1114 static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1115
Steve Blocka7e24c12009-10-30 11:49:00 +00001116 static GCCallback global_gc_prologue_callback_;
1117 static GCCallback global_gc_epilogue_callback_;
1118
1119 // Checks whether a global GC is necessary
1120 static GarbageCollector SelectGarbageCollector(AllocationSpace space);
1121
1122 // Performs garbage collection
1123 static void PerformGarbageCollection(AllocationSpace space,
1124 GarbageCollector collector,
1125 GCTracer* tracer);
1126
Steve Blocka7e24c12009-10-30 11:49:00 +00001127 // Allocate an uninitialized object in map space. The behavior is identical
1128 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1129 // have to test the allocation space argument and (b) can reduce code size
1130 // (since both AllocateRaw and AllocateRawMap are inlined).
1131 static inline Object* AllocateRawMap();
1132
1133 // Allocate an uninitialized object in the global property cell space.
1134 static inline Object* AllocateRawCell();
1135
1136 // Initializes a JSObject based on its map.
1137 static void InitializeJSObjectFromMap(JSObject* obj,
1138 FixedArray* properties,
1139 Map* map);
1140
1141 static bool CreateInitialMaps();
1142 static bool CreateInitialObjects();
1143
1144 // These four Create*EntryStub functions are here because of a gcc-4.4 bug
1145 // that assigns wrong vtable entries.
1146 static void CreateCEntryStub();
Steve Blocka7e24c12009-10-30 11:49:00 +00001147 static void CreateJSEntryStub();
1148 static void CreateJSConstructEntryStub();
1149 static void CreateRegExpCEntryStub();
1150
1151 static void CreateFixedStubs();
1152
Steve Block6ded16b2010-05-10 14:33:55 +01001153 static Object* CreateOddball(const char* to_string, Object* to_number);
Steve Blocka7e24c12009-10-30 11:49:00 +00001154
1155 // Allocate empty fixed array.
1156 static Object* AllocateEmptyFixedArray();
1157
1158 // Performs a minor collection in new generation.
1159 static void Scavenge();
Steve Block6ded16b2010-05-10 14:33:55 +01001160
1161 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1162 Object** pointer);
1163
Leon Clarkee46be812010-01-19 14:06:41 +00001164 static Address DoScavenge(ObjectVisitor* scavenge_visitor,
1165 Address new_space_front);
Steve Blocka7e24c12009-10-30 11:49:00 +00001166
1167 // Performs a major collection in the whole heap.
1168 static void MarkCompact(GCTracer* tracer);
1169
1170 // Code to be run before and after mark-compact.
1171 static void MarkCompactPrologue(bool is_compacting);
1172 static void MarkCompactEpilogue(bool is_compacting);
1173
1174 // Helper function used by CopyObject to copy a source object to an
1175 // allocated target object and update the forwarding pointer in the source
1176 // object. Returns the target object.
Leon Clarkee46be812010-01-19 14:06:41 +00001177 static inline HeapObject* MigrateObject(HeapObject* source,
1178 HeapObject* target,
1179 int size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001180
Steve Block6ded16b2010-05-10 14:33:55 +01001181 static void ClearJSFunctionResultCaches();
1182
Steve Blocka7e24c12009-10-30 11:49:00 +00001183#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1184 // Record the copy of an object in the NewSpace's statistics.
1185 static void RecordCopiedObject(HeapObject* obj);
1186
1187 // Record statistics before and after garbage collection.
1188 static void ReportStatisticsBeforeGC();
1189 static void ReportStatisticsAfterGC();
1190#endif
1191
Steve Blocka7e24c12009-10-30 11:49:00 +00001192 // Rebuild remembered set in an old space.
1193 static void RebuildRSets(PagedSpace* space);
1194
1195 // Rebuild remembered set in the large object space.
1196 static void RebuildRSets(LargeObjectSpace* space);
1197
1198 // Slow part of scavenge object.
1199 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1200
Steve Blocka7e24c12009-10-30 11:49:00 +00001201 // Initializes a function with a shared part and prototype.
1202 // Returns the function.
1203 // Note: this code was factored out of AllocateFunction such that
1204 // other parts of the VM could use it. Specifically, a function that creates
1205 // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1206 // Please note this does not perform a garbage collection.
1207 static inline Object* InitializeFunction(JSFunction* function,
1208 SharedFunctionInfo* shared,
1209 Object* prototype);
1210
Leon Clarkee46be812010-01-19 14:06:41 +00001211
1212 // Initializes the number to string cache based on the max semispace size.
1213 static Object* InitializeNumberStringCache();
1214 // Flush the number to string cache.
1215 static void FlushNumberStringCache();
1216
Steve Blocka7e24c12009-10-30 11:49:00 +00001217 static const int kInitialSymbolTableSize = 2048;
1218 static const int kInitialEvalCacheSize = 64;
1219
1220 friend class Factory;
1221 friend class DisallowAllocationFailure;
1222 friend class AlwaysAllocateScope;
Steve Blockd0582a62009-12-15 09:54:21 +00001223 friend class LinearAllocationScope;
1224};
1225
1226
// Bundle of out-parameter slots filled in by Heap::RecordStats().  Each
// field points at an int the heap writes its current statistics into.
// start_marker/end_marker presumably bracket the record so a consumer can
// validate it -- TODO confirm against the RecordStats implementation.
class HeapStats {
 public:
  int* start_marker;
  int* new_space_size;
  int* new_space_capacity;
  int* old_pointer_space_size;
  int* old_pointer_space_capacity;
  int* old_data_space_size;
  int* old_data_space_capacity;
  int* code_space_size;
  int* code_space_capacity;
  int* map_space_size;
  int* map_space_capacity;
  int* cell_space_size;
  int* cell_space_capacity;
  int* lo_space_size;
  int* global_handle_count;
  int* weak_global_handle_count;
  int* pending_global_handle_count;
  int* near_death_global_handle_count;
  int* destroyed_global_handle_count;
  int* end_marker;
};
1250
1251
// RAII scope that raises Heap::always_allocate_scope_depth_ while alive.
// Heap code outside this view presumably consults the depth to force
// allocations to succeed instead of failing with retry-after-GC --
// NOTE(review): confirm in heap-inl.h.
class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code. The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    // Depth must return to exactly 0: scopes are expected not to nest
    // (see the constructor assert).
    Heap::always_allocate_scope_depth_--;
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};
1268
1269
Steve Blockd0582a62009-12-15 09:54:21 +00001270class LinearAllocationScope {
1271 public:
1272 LinearAllocationScope() {
1273 Heap::linear_allocation_scope_depth_++;
1274 }
1275
1276 ~LinearAllocationScope() {
1277 Heap::linear_allocation_scope_depth_--;
1278 ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
1279 }
1280};
1281
1282
Steve Blocka7e24c12009-10-30 11:49:00 +00001283#ifdef DEBUG
1284// Visitor class to verify interior pointers that do not have remembered set
1285// bits. All heap object pointers have to point into the heap to a location
1286// that has a map pointer at its first word. Caveat: Heap::Contains is an
1287// approximation because it can return true for objects in a heap space but
1288// above the allocation pointer.
1289class VerifyPointersVisitor: public ObjectVisitor {
1290 public:
1291 void VisitPointers(Object** start, Object** end) {
1292 for (Object** current = start; current < end; current++) {
1293 if ((*current)->IsHeapObject()) {
1294 HeapObject* object = HeapObject::cast(*current);
1295 ASSERT(Heap::Contains(object));
1296 ASSERT(object->map()->IsMap());
1297 }
1298 }
1299 }
1300};
1301
1302
1303// Visitor class to verify interior pointers that have remembered set bits.
1304// As VerifyPointersVisitor but also checks that remembered set bits are
1305// always set for pointers into new space.
1306class VerifyPointersAndRSetVisitor: public ObjectVisitor {
1307 public:
1308 void VisitPointers(Object** start, Object** end) {
1309 for (Object** current = start; current < end; current++) {
1310 if ((*current)->IsHeapObject()) {
1311 HeapObject* object = HeapObject::cast(*current);
1312 ASSERT(Heap::Contains(object));
1313 ASSERT(object->map()->IsMap());
1314 if (Heap::InNewSpace(object)) {
1315 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
1316 }
1317 }
1318 }
1319 }
1320};
1321#endif
1322
1323
1324// Space iterator for iterating over all spaces of the heap.
1325// Returns each space in turn, and null when it is done.
1326class AllSpaces BASE_EMBEDDED {
1327 public:
1328 Space* next();
1329 AllSpaces() { counter_ = FIRST_SPACE; }
1330 private:
1331 int counter_;
1332};
1333
1334
1335// Space iterator for iterating over all old spaces of the heap: Old pointer
1336// space, old data space and code space.
1337// Returns each space in turn, and null when it is done.
1338class OldSpaces BASE_EMBEDDED {
1339 public:
1340 OldSpace* next();
1341 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1342 private:
1343 int counter_;
1344};
1345
1346
1347// Space iterator for iterating over all the paged spaces of the heap:
Leon Clarkee46be812010-01-19 14:06:41 +00001348// Map space, old pointer space, old data space, code space and cell space.
Steve Blocka7e24c12009-10-30 11:49:00 +00001349// Returns each space in turn, and null when it is done.
1350class PagedSpaces BASE_EMBEDDED {
1351 public:
1352 PagedSpace* next();
1353 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1354 private:
1355 int counter_;
1356};
1357
1358
1359// Space iterator for iterating over all spaces of the heap.
1360// For each space an object iterator is provided. The deallocation of the
1361// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // Returns whether another space remains to be iterated.
  bool has_next();
  // Returns an object iterator for the next space.  Per the class comment,
  // the returned iterator is deallocated by this SpaceIterator, not by the
  // caller.
  ObjectIterator* next();

 private:
  // Creates an object iterator for the current space.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1376
1377
// A HeapIterator provides iteration over the whole heap.  It aggregates the
// specific iterators for the different spaces, since each of those can
// iterate over a single space only.
1381
class HeapIterator BASE_EMBEDDED {
 public:
  // NOTE(review): 'explicit' has no effect on a zero-argument constructor,
  // and a virtual destructor on a stack-embedded (BASE_EMBEDDED) class is
  // unnecessary; both could be dropped.
  explicit HeapIterator();
  virtual ~HeapIterator();

  // Returns the next heap object, crossing space boundaries via the
  // aggregated iterators; presumably NULL when the heap is exhausted --
  // TODO confirm against the definition in heap.cc.
  HeapObject* next();
  // Restarts the iteration -- confirm exact semantics in heap.cc.
  void reset();

 private:
  // Perform the initialization.
  void Init();

  // Perform all necessary shutdown (destruction) work.
  void Shutdown();

  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
1402
1403
1404// Cache for mapping (map, property name) into field offset.
1405// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();

  // Cache geometry: kLength direct-mapped entries, indexed by Hash().
  static const int kLength = 64;
  static const int kCapacityMask = kLength - 1;
  // Shift applied to the map pointer when hashing; presumably discards the
  // always-zero low bits of the aligned pointer -- TODO confirm against
  // the Hash() definition in heap.cc.
  static const int kMapHashShift = 2;

 private:
  static inline int Hash(Map* map, String* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  static Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  static Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];

  // ExternalReference reads the array addresses above for generated code.
  friend class ExternalReference;
};
Steve Blocka7e24c12009-10-30 11:49:00 +00001443
1444
1445// Cache for mapping (array, property name) into descriptor index.
1446// The cache contains both positive and negative results.
1447// Descriptor index equals kNotFound means the property is absent.
1448// Cleared at startup and prior to any gc.
1449class DescriptorLookupCache {
1450 public:
1451 // Lookup descriptor index for (map, name).
1452 // If absent, kAbsent is returned.
1453 static int Lookup(DescriptorArray* array, String* name) {
1454 if (!StringShape(name).IsSymbol()) return kAbsent;
1455 int index = Hash(array, name);
1456 Key& key = keys_[index];
1457 if ((key.array == array) && (key.name == name)) return results_[index];
1458 return kAbsent;
1459 }
1460
1461 // Update an element in the cache.
1462 static void Update(DescriptorArray* array, String* name, int result) {
1463 ASSERT(result != kAbsent);
1464 if (StringShape(name).IsSymbol()) {
1465 int index = Hash(array, name);
1466 Key& key = keys_[index];
1467 key.array = array;
1468 key.name = name;
1469 results_[index] = result;
1470 }
1471 }
1472
1473 // Clear the cache.
1474 static void Clear();
1475
1476 static const int kAbsent = -2;
1477 private:
1478 static int Hash(DescriptorArray* array, String* name) {
1479 // Uses only lower 32 bits if pointers are larger.
Andrei Popescu402d9372010-02-26 13:31:12 +00001480 uint32_t array_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001481 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
Andrei Popescu402d9372010-02-26 13:31:12 +00001482 uint32_t name_hash =
Steve Blocka7e24c12009-10-30 11:49:00 +00001483 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1484 return (array_hash ^ name_hash) % kLength;
1485 }
1486
1487 static const int kLength = 64;
1488 struct Key {
1489 DescriptorArray* array;
1490 String* name;
1491 };
1492
1493 static Key keys_[kLength];
1494 static int results_[kLength];
1495};
1496
1497
1498// ----------------------------------------------------------------------------
1499// Marking stack for tracing live objects.
1500
1501class MarkingStack {
1502 public:
1503 void Initialize(Address low, Address high) {
1504 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1505 high_ = reinterpret_cast<HeapObject**>(high);
1506 overflowed_ = false;
1507 }
1508
1509 bool is_full() { return top_ >= high_; }
1510
1511 bool is_empty() { return top_ <= low_; }
1512
1513 bool overflowed() { return overflowed_; }
1514
1515 void clear_overflowed() { overflowed_ = false; }
1516
1517 // Push the (marked) object on the marking stack if there is room,
1518 // otherwise mark the object as overflowed and wait for a rescan of the
1519 // heap.
1520 void Push(HeapObject* object) {
1521 CHECK(object->IsHeapObject());
1522 if (is_full()) {
1523 object->SetOverflow();
1524 overflowed_ = true;
1525 } else {
1526 *(top_++) = object;
1527 }
1528 }
1529
1530 HeapObject* Pop() {
1531 ASSERT(!is_empty());
1532 HeapObject* object = *(--top_);
1533 CHECK(object->IsHeapObject());
1534 return object;
1535 }
1536
1537 private:
1538 HeapObject** low_;
1539 HeapObject** top_;
1540 HeapObject** high_;
1541 bool overflowed_;
1542};
1543
1544
1545// A helper class to document/test C++ scopes where we do not
1546// expect a GC. Usage:
1547//
1548// /* Allocation not allowed: we cannot handle a GC in this scope. */
1549// { AssertNoAllocation nogc;
1550// ...
1551// }
1552
1553#ifdef DEBUG
1554
// RAII scope (debug only): while an instance is alive,
// Heap::disallow_allocation_failure_ is forced to true; the previous
// value is restored when the scope is left.
class DisallowAllocationFailure {
 public:
  DisallowAllocationFailure() {
    old_state_ = Heap::disallow_allocation_failure_;
    Heap::disallow_allocation_failure_ = true;
  }
  ~DisallowAllocationFailure() {
    Heap::disallow_allocation_failure_ = old_state_;
  }
 private:
  bool old_state_;  // Flag value before this scope was entered.
};
1567
// RAII scope (debug only) asserting that no allocation happens inside
// it: disables allocation on construction and restores the previous
// allow-allocation state on destruction.
class AssertNoAllocation {
 public:
  AssertNoAllocation() {
    old_state_ = Heap::allow_allocation(false);
  }

  ~AssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Previous state returned by Heap::allow_allocation.
};
1581
// RAII scope (debug only) that re-enables allocation inside an
// enclosing AssertNoAllocation scope; restores the previous state on
// destruction.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() {
    old_state_ = Heap::allow_allocation(true);
  }

  ~DisableAssertNoAllocation() {
    Heap::allow_allocation(old_state_);
  }

 private:
  bool old_state_;  // Previous state returned by Heap::allow_allocation.
};
1595
1596#else // ndef DEBUG
1597
// Release-build version: allocation checking is a debug-only aid, so
// this is a no-op.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};
1603
// Release-build version: no-op counterpart of the debug class above.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1609
1610#endif
1611
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  // Time spent while in the external scope counts towards the
  // external time in the tracer and will be reported separately.
  class ExternalScope BASE_EMBEDDED {
   public:
    explicit ExternalScope(GCTracer* tracer) : tracer_(tracer) {
      start_time_ = OS::TimeCurrentMillis();
    }
    // Adds the elapsed wall-clock time of this scope to the tracer.
    ~ExternalScope() {
      tracer_->external_time_ += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    double start_time_;  // Wall-clock time at scope entry (ms).
   };

  GCTracer();
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }
  bool is_compacting() const { return is_compacting_; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of object in heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  double start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // Keep track of the amount of time spent in external callbacks.
  double external_time_;

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects.  Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared.  Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC.  Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;
};
1695
1696
1697class TranscendentalCache {
1698 public:
1699 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
1700
1701 explicit TranscendentalCache(Type t);
1702
1703 // Returns a heap number with f(input), where f is a math function specified
1704 // by the 'type' argument.
1705 static inline Object* Get(Type type, double input) {
1706 TranscendentalCache* cache = caches_[type];
1707 if (cache == NULL) {
1708 caches_[type] = cache = new TranscendentalCache(type);
1709 }
1710 return cache->Get(input);
1711 }
1712
1713 // The cache contains raw Object pointers. This method disposes of
1714 // them before a garbage collection.
1715 static void Clear();
1716
1717 private:
1718 inline Object* Get(double input) {
1719 Converter c;
1720 c.dbl = input;
1721 int hash = Hash(c);
1722 Element e = elements_[hash];
1723 if (e.in[0] == c.integers[0] &&
1724 e.in[1] == c.integers[1]) {
1725 ASSERT(e.output != NULL);
Andrei Popescu402d9372010-02-26 13:31:12 +00001726 Counters::transcendental_cache_hit.Increment();
Steve Blocka7e24c12009-10-30 11:49:00 +00001727 return e.output;
1728 }
1729 double answer = Calculate(input);
1730 Object* heap_number = Heap::AllocateHeapNumber(answer);
1731 if (!heap_number->IsFailure()) {
1732 elements_[hash].in[0] = c.integers[0];
1733 elements_[hash].in[1] = c.integers[1];
1734 elements_[hash].output = heap_number;
1735 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001736 Counters::transcendental_cache_miss.Increment();
Steve Blocka7e24c12009-10-30 11:49:00 +00001737 return heap_number;
1738 }
1739
1740 inline double Calculate(double input) {
1741 switch (type_) {
1742 case ACOS:
1743 return acos(input);
1744 case ASIN:
1745 return asin(input);
1746 case ATAN:
1747 return atan(input);
1748 case COS:
1749 return cos(input);
1750 case EXP:
1751 return exp(input);
1752 case LOG:
1753 return log(input);
1754 case SIN:
1755 return sin(input);
1756 case TAN:
1757 return tan(input);
1758 default:
1759 return 0.0; // Never happens.
1760 }
1761 }
1762 static const int kCacheSize = 512;
1763 struct Element {
1764 uint32_t in[2];
1765 Object* output;
1766 };
1767 union Converter {
1768 double dbl;
1769 uint32_t integers[2];
1770 };
1771 inline static int Hash(const Converter& c) {
1772 uint32_t hash = (c.integers[0] ^ c.integers[1]);
1773 hash ^= hash >> 16;
1774 hash ^= hash >> 8;
1775 return (hash & (kCacheSize - 1));
1776 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001777
1778 static Address cache_array_address() {
1779 // Used to create an external reference.
1780 return reinterpret_cast<Address>(caches_);
1781 }
1782
1783 // Allow access to the caches_ array as an ExternalReference.
1784 friend class ExternalReference;
1785 // Inline implementation of the caching.
1786 friend class TranscendentalCacheStub;
1787
Steve Blocka7e24c12009-10-30 11:49:00 +00001788 static TranscendentalCache* caches_[kNumberOfCaches];
1789 Element elements_[kCacheSize];
1790 Type type_;
1791};
1792
1793
Leon Clarkee46be812010-01-19 14:06:41 +00001794// External strings table is a place where all external strings are
1795// registered. We need to keep track of such strings to properly
1796// finalize them.
1797class ExternalStringTable : public AllStatic {
1798 public:
1799 // Registers an external string.
1800 inline static void AddString(String* string);
1801
1802 inline static void Iterate(ObjectVisitor* v);
1803
1804 // Restores internal invariant and gets rid of collected strings.
1805 // Must be called after each Iterate() that modified the strings.
1806 static void CleanUp();
1807
1808 // Destroys all allocated memory.
1809 static void TearDown();
1810
1811 private:
1812 friend class Heap;
1813
1814 inline static void Verify();
1815
1816 inline static void AddOldString(String* string);
1817
1818 // Notifies the table that only a prefix of the new list is valid.
1819 inline static void ShrinkNewStrings(int position);
1820
1821 // To speed up scavenge collections new space string are kept
1822 // separate from old space strings.
1823 static List<Object*> new_space_strings_;
1824 static List<Object*> old_space_strings_;
1825};
1826
Steve Blocka7e24c12009-10-30 11:49:00 +00001827} } // namespace v8::internal
1828
1829#endif // V8_HEAP_H_