// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_HEAP_H_
29#define V8_HEAP_H_
30
31#include <math.h>
32
33#include "zone-inl.h"
34
35
36namespace v8 {
37namespace internal {
38
// Defines all the strong roots in Heap that exist on every target/config.
// X-macro: callers supply V(type, accessor_name, CamelName) and the list
// expands one V(...) per root. Blame-viewer line-number residue that had
// been fused into each line (breaking the macro continuations) is removed.
#define UNCONDITIONAL_STRONG_ROOT_LIST(V) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, stack_limit, StackLimit) \
  V(Object, undefined_value, UndefinedValue) \
  V(Object, the_hole_value, TheHoleValue) \
  V(Object, null_value, NullValue) \
  V(Object, true_value, TrueValue) \
  V(Object, false_value, FalseValue) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, meta_map, MetaMap) \
  V(Object, termination_exception, TerminationException) \
  V(Map, hash_table_map, HashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(Map, short_string_map, ShortStringMap) \
  V(Map, medium_string_map, MediumStringMap) \
  V(Map, long_string_map, LongStringMap) \
  V(Map, short_ascii_string_map, ShortAsciiStringMap) \
  V(Map, medium_ascii_string_map, MediumAsciiStringMap) \
  V(Map, long_ascii_string_map, LongAsciiStringMap) \
  V(Map, short_symbol_map, ShortSymbolMap) \
  V(Map, medium_symbol_map, MediumSymbolMap) \
  V(Map, long_symbol_map, LongSymbolMap) \
  V(Map, short_ascii_symbol_map, ShortAsciiSymbolMap) \
  V(Map, medium_ascii_symbol_map, MediumAsciiSymbolMap) \
  V(Map, long_ascii_symbol_map, LongAsciiSymbolMap) \
  V(Map, short_cons_symbol_map, ShortConsSymbolMap) \
  V(Map, medium_cons_symbol_map, MediumConsSymbolMap) \
  V(Map, long_cons_symbol_map, LongConsSymbolMap) \
  V(Map, short_cons_ascii_symbol_map, ShortConsAsciiSymbolMap) \
  V(Map, medium_cons_ascii_symbol_map, MediumConsAsciiSymbolMap) \
  V(Map, long_cons_ascii_symbol_map, LongConsAsciiSymbolMap) \
  V(Map, short_sliced_symbol_map, ShortSlicedSymbolMap) \
  V(Map, medium_sliced_symbol_map, MediumSlicedSymbolMap) \
  V(Map, long_sliced_symbol_map, LongSlicedSymbolMap) \
  V(Map, short_sliced_ascii_symbol_map, ShortSlicedAsciiSymbolMap) \
  V(Map, medium_sliced_ascii_symbol_map, MediumSlicedAsciiSymbolMap) \
  V(Map, long_sliced_ascii_symbol_map, LongSlicedAsciiSymbolMap) \
  V(Map, short_external_symbol_map, ShortExternalSymbolMap) \
  V(Map, medium_external_symbol_map, MediumExternalSymbolMap) \
  V(Map, long_external_symbol_map, LongExternalSymbolMap) \
  V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \
  V(Map, medium_external_ascii_symbol_map, MediumExternalAsciiSymbolMap) \
  V(Map, long_external_ascii_symbol_map, LongExternalAsciiSymbolMap) \
  V(Map, short_cons_string_map, ShortConsStringMap) \
  V(Map, medium_cons_string_map, MediumConsStringMap) \
  V(Map, long_cons_string_map, LongConsStringMap) \
  V(Map, short_cons_ascii_string_map, ShortConsAsciiStringMap) \
  V(Map, medium_cons_ascii_string_map, MediumConsAsciiStringMap) \
  V(Map, long_cons_ascii_string_map, LongConsAsciiStringMap) \
  V(Map, short_sliced_string_map, ShortSlicedStringMap) \
  V(Map, medium_sliced_string_map, MediumSlicedStringMap) \
  V(Map, long_sliced_string_map, LongSlicedStringMap) \
  V(Map, short_sliced_ascii_string_map, ShortSlicedAsciiStringMap) \
  V(Map, medium_sliced_ascii_string_map, MediumSlicedAsciiStringMap) \
  V(Map, long_sliced_ascii_string_map, LongSlicedAsciiStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, medium_external_string_map, MediumExternalStringMap) \
  V(Map, long_external_string_map, LongExternalStringMap) \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
  V(Map, medium_external_ascii_string_map, MediumExternalAsciiStringMap) \
  V(Map, long_external_ascii_string_map, LongExternalAsciiStringMap) \
  V(Map, undetectable_short_string_map, UndetectableShortStringMap) \
  V(Map, undetectable_medium_string_map, UndetectableMediumStringMap) \
  V(Map, undetectable_long_string_map, UndetectableLongStringMap) \
  V(Map, undetectable_short_ascii_string_map, UndetectableShortAsciiStringMap) \
  V(Map, \
    undetectable_medium_ascii_string_map, \
    UndetectableMediumAsciiStringMap) \
  V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, pixel_array_map, PixelArrayMap) \
  V(Map, context_map, ContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, code_map, CodeMap) \
  V(Map, oddball_map, OddballMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, boilerplate_function_map, BoilerplateFunctionMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, proxy_map, ProxyMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  V(Object, nan_value, NanValue) \
  V(Object, minus_zero_value, MinusZeroValue) \
  V(String, empty_string, EmptyString) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(Proxy, prototype_accessors, PrototypeAccessors) \
  V(NumberDictionary, code_stubs, CodeStubs) \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(Code, c_entry_code, CEntryCode) \
  V(Code, c_entry_debug_break_code, CEntryDebugBreakCode) \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Object, last_script_id, LastScriptId)
// STRONG_ROOT_LIST: the unconditional roots, plus — only on ARM with the
// native RegExp engine enabled — the RegExp C-entry code stub as one extra
// strong root. Viewer line-number residue stripped so the directives parse.
#if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
#define STRONG_ROOT_LIST(V) \
  UNCONDITIONAL_STRONG_ROOT_LIST(V) \
  V(Code, re_c_entry_code, RegExpCEntryCode)
#else
#define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
#endif
149
// ROOT_LIST: every root — the strong roots plus the symbol table.
// Viewer line-number residue stripped so the macro continuations parse.
#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  V(SymbolTable, symbol_table, SymbolTable)
153
// Defines the frequently used symbols held as heap roots. X-macro: callers
// supply V(accessor_name, literal_string). String literals are preserved
// byte-for-byte; only fused viewer line-number residue is removed.
#define SYMBOL_LIST(V) \
  V(Array_symbol, "Array") \
  V(Object_symbol, "Object") \
  V(Proto_symbol, "__proto__") \
  V(StringImpl_symbol, "StringImpl") \
  V(arguments_symbol, "arguments") \
  V(Arguments_symbol, "Arguments") \
  V(arguments_shadow_symbol, ".arguments") \
  V(call_symbol, "call") \
  V(apply_symbol, "apply") \
  V(caller_symbol, "caller") \
  V(boolean_symbol, "boolean") \
  V(Boolean_symbol, "Boolean") \
  V(callee_symbol, "callee") \
  V(constructor_symbol, "constructor") \
  V(code_symbol, ".code") \
  V(result_symbol, ".result") \
  V(catch_var_symbol, ".catch-var") \
  V(empty_symbol, "") \
  V(eval_symbol, "eval") \
  V(function_symbol, "function") \
  V(length_symbol, "length") \
  V(name_symbol, "name") \
  V(number_symbol, "number") \
  V(Number_symbol, "Number") \
  V(RegExp_symbol, "RegExp") \
  V(object_symbol, "object") \
  V(prototype_symbol, "prototype") \
  V(string_symbol, "string") \
  V(String_symbol, "String") \
  V(Date_symbol, "Date") \
  V(this_symbol, "this") \
  V(to_string_symbol, "toString") \
  V(char_at_symbol, "CharAt") \
  V(undefined_symbol, "undefined") \
  V(value_of_symbol, "valueOf") \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
  V(illegal_access_symbol, "illegal access") \
  V(out_of_memory_symbol, "out-of-memory") \
  V(illegal_execution_state_symbol, "illegal execution state") \
  V(get_symbol, "get") \
  V(set_symbol, "set") \
  V(function_class_symbol, "Function") \
  V(illegal_argument_symbol, "illegal argument") \
  V(MakeReferenceError_symbol, "MakeReferenceError") \
  V(MakeSyntaxError_symbol, "MakeSyntaxError") \
  V(MakeTypeError_symbol, "MakeTypeError") \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment") \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in") \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op") \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op") \
  V(illegal_return_symbol, "illegal_return") \
  V(illegal_break_symbol, "illegal_break") \
  V(illegal_continue_symbol, "illegal_continue") \
  V(unknown_label_symbol, "unknown_label") \
  V(redeclaration_symbol, "redeclaration") \
  V(failure_symbol, "<failure>") \
  V(space_symbol, " ") \
  V(exec_symbol, "exec") \
  V(zero_symbol, "0") \
  V(global_eval_symbol, "GlobalEval") \
  V(identity_hash_symbol, "v8::IdentityHash")
218
219
220// Forward declaration of the GCTracer class.
221class GCTracer;
222
223
224// The all static Heap captures the interface to the global object heap.
225// All JavaScript contexts by this process share the same object heap.
226
227class Heap : public AllStatic {
228 public:
229 // Configure heap size before setup. Return false if the heap has been
230 // setup already.
231 static bool ConfigureHeap(int semispace_size, int old_gen_size);
232 static bool ConfigureHeapDefault();
233
234 // Initializes the global object heap. If create_heap_objects is true,
235 // also creates the basic non-mutable objects.
236 // Returns whether it succeeded.
237 static bool Setup(bool create_heap_objects);
238
239 // Destroys all memory allocated by the heap.
240 static void TearDown();
241
242 // Sets the stack limit in the roots_ array. Some architectures generate code
243 // that looks here, because it is faster than loading from the static jslimit_
244 // variable.
245 static void SetStackLimit(intptr_t limit);
246
247 // Returns whether Setup has been called.
248 static bool HasBeenSetup();
249
250 // Returns the maximum heap capacity.
251 static int MaxCapacity() {
252 return young_generation_size_ + old_generation_size_;
253 }
254 static int SemiSpaceSize() { return semispace_size_; }
255 static int InitialSemiSpaceSize() { return initial_semispace_size_; }
256 static int YoungGenerationSize() { return young_generation_size_; }
257 static int OldGenerationSize() { return old_generation_size_; }
258
259 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
260 // more spaces are needed until it reaches the limit.
261 static int Capacity();
262
263 // Returns the available bytes in space w/o growing.
264 // Heap doesn't guarantee that it can allocate an object that requires
265 // all available bytes. Check MaxHeapObjectSize() instead.
266 static int Available();
267
268 // Returns the maximum object size in paged space.
269 static inline int MaxObjectSizeInPagedSpace();
270
271 // Returns of size of all objects residing in the heap.
272 static int SizeOfObjects();
273
274 // Return the starting address and a mask for the new space. And-masking an
275 // address with the mask will result in the start address of the new space
276 // for all addresses in either semispace.
277 static Address NewSpaceStart() { return new_space_.start(); }
278 static uintptr_t NewSpaceMask() { return new_space_.mask(); }
279 static Address NewSpaceTop() { return new_space_.top(); }
280
281 static NewSpace* new_space() { return &new_space_; }
282 static OldSpace* old_pointer_space() { return old_pointer_space_; }
283 static OldSpace* old_data_space() { return old_data_space_; }
284 static OldSpace* code_space() { return code_space_; }
285 static MapSpace* map_space() { return map_space_; }
286 static CellSpace* cell_space() { return cell_space_; }
287 static LargeObjectSpace* lo_space() { return lo_space_; }
288
289 static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
290 static Address always_allocate_scope_depth_address() {
291 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
292 }
293
294 static Address* NewSpaceAllocationTopAddress() {
295 return new_space_.allocation_top_address();
296 }
297 static Address* NewSpaceAllocationLimitAddress() {
298 return new_space_.allocation_limit_address();
299 }
300
301 // Uncommit unused semi space.
302 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
303
304#ifdef ENABLE_HEAP_PROTECTION
305 // Protect/unprotect the heap by marking all spaces read-only/writable.
306 static void Protect();
307 static void Unprotect();
308#endif
309
310 // Allocates and initializes a new JavaScript object based on a
311 // constructor.
312 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
313 // failed.
314 // Please note this does not perform a garbage collection.
315 static Object* AllocateJSObject(JSFunction* constructor,
316 PretenureFlag pretenure = NOT_TENURED);
317
318 // Allocates and initializes a new global object based on a constructor.
319 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
320 // failed.
321 // Please note this does not perform a garbage collection.
322 static Object* AllocateGlobalObject(JSFunction* constructor);
323
324 // Returns a deep copy of the JavaScript object.
325 // Properties and elements are copied too.
326 // Returns failure if allocation failed.
327 static Object* CopyJSObject(JSObject* source);
328
329 // Allocates the function prototype.
330 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
331 // failed.
332 // Please note this does not perform a garbage collection.
333 static Object* AllocateFunctionPrototype(JSFunction* function);
334
335 // Reinitialize an JSGlobalProxy based on a constructor. The object
336 // must have the same size as objects allocated using the
337 // constructor. The object is reinitialized and behaves as an
338 // object that has been freshly allocated using the constructor.
339 static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
340 JSGlobalProxy* global);
341
342 // Allocates and initializes a new JavaScript object based on a map.
343 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
344 // failed.
345 // Please note this does not perform a garbage collection.
346 static Object* AllocateJSObjectFromMap(Map* map,
347 PretenureFlag pretenure = NOT_TENURED);
348
349 // Allocates a heap object based on the map.
350 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
351 // failed.
352 // Please note this function does not perform a garbage collection.
353 static Object* Allocate(Map* map, AllocationSpace space);
354
355 // Allocates a JS Map in the heap.
356 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
357 // failed.
358 // Please note this function does not perform a garbage collection.
359 static Object* AllocateMap(InstanceType instance_type, int instance_size);
360
361 // Allocates a partial map for bootstrapping.
362 static Object* AllocatePartialMap(InstanceType instance_type,
363 int instance_size);
364
365 // Allocate a map for the specified function
366 static Object* AllocateInitialMap(JSFunction* fun);
367
368 // Allocates and fully initializes a String. There are two String
369 // encodings: ASCII and two byte. One should choose between the three string
370 // allocation functions based on the encoding of the string buffer used to
371 // initialized the string.
372 // - ...FromAscii initializes the string from a buffer that is ASCII
373 // encoded (it does not check that the buffer is ASCII encoded) and the
374 // result will be ASCII encoded.
375 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
376 // encoded. If the characters are all single-byte characters, the
377 // result will be ASCII encoded, otherwise it will converted to two
378 // byte.
379 // - ...FromTwoByte initializes the string from a buffer that is two-byte
380 // encoded. If the characters are all single-byte characters, the
381 // result will be converted to ASCII, otherwise it will be left as
382 // two-byte.
383 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
384 // failed.
385 // Please note this does not perform a garbage collection.
386 static Object* AllocateStringFromAscii(
387 Vector<const char> str,
388 PretenureFlag pretenure = NOT_TENURED);
389 static Object* AllocateStringFromUtf8(
390 Vector<const char> str,
391 PretenureFlag pretenure = NOT_TENURED);
392 static Object* AllocateStringFromTwoByte(
393 Vector<const uc16> str,
394 PretenureFlag pretenure = NOT_TENURED);
395
396 // Allocates a symbol in old space based on the character stream.
397 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
398 // failed.
399 // Please note this function does not perform a garbage collection.
400 static inline Object* AllocateSymbol(Vector<const char> str,
401 int chars,
402 uint32_t length_field);
403
404 static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
405 int chars,
406 uint32_t length_field);
407
408 static Object* AllocateExternalSymbol(Vector<const char> str,
409 int chars);
410
411
412 // Allocates and partially initializes a String. There are two String
413 // encodings: ASCII and two byte. These functions allocate a string of the
414 // given length and set its map and length fields. The characters of the
415 // string are uninitialized.
416 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
417 // failed.
418 // Please note this does not perform a garbage collection.
419 static Object* AllocateRawAsciiString(
420 int length,
421 PretenureFlag pretenure = NOT_TENURED);
422 static Object* AllocateRawTwoByteString(
423 int length,
424 PretenureFlag pretenure = NOT_TENURED);
425
426 // Computes a single character string where the character has code.
427 // A cache is used for ascii codes.
428 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
429 // failed. Please note this does not perform a garbage collection.
430 static Object* LookupSingleCharacterStringFromCode(uint16_t code);
431
432 // Allocate a byte array of the specified length
433 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
434 // failed.
435 // Please note this does not perform a garbage collection.
436 static Object* AllocateByteArray(int length, PretenureFlag pretenure);
437
438 // Allocate a non-tenured byte array of the specified length
439 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
440 // failed.
441 // Please note this does not perform a garbage collection.
442 static Object* AllocateByteArray(int length);
443
444 // Allocate a pixel array of the specified length
445 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
446 // failed.
447 // Please note this does not perform a garbage collection.
448 static Object* AllocatePixelArray(int length,
449 uint8_t* external_pointer,
450 PretenureFlag pretenure);
451
452 // Allocate a tenured JS global property cell.
453 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
454 // failed.
455 // Please note this does not perform a garbage collection.
456 static Object* AllocateJSGlobalPropertyCell(Object* value);
457
458 // Allocates a fixed array initialized with undefined values
459 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
460 // failed.
461 // Please note this does not perform a garbage collection.
462 static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
463 // Allocate uninitialized, non-tenured fixed array with length elements.
464 static Object* AllocateFixedArray(int length);
465
466 // Make a copy of src and return it. Returns
467 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
468 static Object* CopyFixedArray(FixedArray* src);
469
470 // Allocates a fixed array initialized with the hole values.
471 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
472 // failed.
473 // Please note this does not perform a garbage collection.
474 static Object* AllocateFixedArrayWithHoles(int length);
475
476 // AllocateHashTable is identical to AllocateFixedArray except
477 // that the resulting object has hash_table_map as map.
478 static Object* AllocateHashTable(int length);
479
480 // Allocate a global (but otherwise uninitialized) context.
481 static Object* AllocateGlobalContext();
482
483 // Allocate a function context.
484 static Object* AllocateFunctionContext(int length, JSFunction* closure);
485
486 // Allocate a 'with' context.
487 static Object* AllocateWithContext(Context* previous,
488 JSObject* extension,
489 bool is_catch_context);
490
491 // Allocates a new utility object in the old generation.
492 static Object* AllocateStruct(InstanceType type);
493
494 // Allocates a function initialized with a shared part.
495 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
496 // failed.
497 // Please note this does not perform a garbage collection.
498 static Object* AllocateFunction(Map* function_map,
499 SharedFunctionInfo* shared,
500 Object* prototype);
501
502 // Indicies for direct access into argument objects.
503 static const int arguments_callee_index = 0;
504 static const int arguments_length_index = 1;
505
506 // Allocates an arguments object - optionally with an elements array.
507 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
508 // failed.
509 // Please note this does not perform a garbage collection.
510 static Object* AllocateArgumentsObject(Object* callee, int length);
511
512 // Converts a double into either a Smi or a HeapNumber object.
513 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
514 // failed.
515 // Please note this does not perform a garbage collection.
516 static Object* NewNumberFromDouble(double value,
517 PretenureFlag pretenure = NOT_TENURED);
518
519 // Same as NewNumberFromDouble, but may return a preallocated/immutable
520 // number object (e.g., minus_zero_value_, nan_value_)
521 static Object* NumberFromDouble(double value,
522 PretenureFlag pretenure = NOT_TENURED);
523
524 // Allocated a HeapNumber from value.
525 static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
526 static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
527
528 // Converts an int into either a Smi or a HeapNumber object.
529 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
530 // failed.
531 // Please note this does not perform a garbage collection.
532 static inline Object* NumberFromInt32(int32_t value);
533
534 // Converts an int into either a Smi or a HeapNumber object.
535 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
536 // failed.
537 // Please note this does not perform a garbage collection.
538 static inline Object* NumberFromUint32(uint32_t value);
539
540 // Allocates a new proxy object.
541 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
542 // failed.
543 // Please note this does not perform a garbage collection.
544 static Object* AllocateProxy(Address proxy,
545 PretenureFlag pretenure = NOT_TENURED);
546
547 // Allocates a new SharedFunctionInfo object.
548 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
549 // failed.
550 // Please note this does not perform a garbage collection.
551 static Object* AllocateSharedFunctionInfo(Object* name);
552
553 // Allocates a new cons string object.
554 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
555 // failed.
556 // Please note this does not perform a garbage collection.
557 static Object* AllocateConsString(String* first, String* second);
558
559 // Allocates a new sliced string object which is a slice of an underlying
560 // string buffer stretching from the index start (inclusive) to the index
561 // end (exclusive).
562 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
563 // failed.
564 // Please note this does not perform a garbage collection.
565 static Object* AllocateSlicedString(String* buffer,
566 int start,
567 int end);
568
569 // Allocates a new sub string object which is a substring of an underlying
570 // string buffer stretching from the index start (inclusive) to the index
571 // end (exclusive).
572 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
573 // failed.
574 // Please note this does not perform a garbage collection.
575 static Object* AllocateSubString(String* buffer,
576 int start,
577 int end);
578
579 // Allocate a new external string object, which is backed by a string
580 // resource that resides outside the V8 heap.
581 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
582 // failed.
583 // Please note this does not perform a garbage collection.
584 static Object* AllocateExternalStringFromAscii(
585 ExternalAsciiString::Resource* resource);
586 static Object* AllocateExternalStringFromTwoByte(
587 ExternalTwoByteString::Resource* resource);
588
589 // Allocates an uninitialized object. The memory is non-executable if the
590 // hardware and OS allow.
591 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
592 // failed.
593 // Please note this function does not perform a garbage collection.
594 static inline Object* AllocateRaw(int size_in_bytes,
595 AllocationSpace space,
596 AllocationSpace retry_space);
597
598 // Initialize a filler object to keep the ability to iterate over the heap
599 // when shortening objects.
600 static void CreateFillerObjectAt(Address addr, int size);
601
602 // Makes a new native code object
603 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
604 // failed. On success, the pointer to the Code object is stored in the
605 // self_reference. This allows generated code to reference its own Code
606 // object by containing this pointer.
607 // Please note this function does not perform a garbage collection.
608 static Object* CreateCode(const CodeDesc& desc,
609 ZoneScopeInfo* sinfo,
610 Code::Flags flags,
611 Handle<Object> self_reference);
612
613 static Object* CopyCode(Code* code);
614 // Finds the symbol for string in the symbol table.
615 // If not found, a new symbol is added to the table and returned.
616 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
617 // failed.
618 // Please note this function does not perform a garbage collection.
619 static Object* LookupSymbol(Vector<const char> str);
620 static Object* LookupAsciiSymbol(const char* str) {
621 return LookupSymbol(CStrVector(str));
622 }
623 static Object* LookupSymbol(String* str);
624 static bool LookupSymbolIfExists(String* str, String** symbol);
625
626 // Compute the matching symbol map for a string if possible.
627 // NULL is returned if string is in new space or not flattened.
628 static Map* SymbolMapForString(String* str);
629
630 // Converts the given boolean condition to JavaScript boolean value.
631 static Object* ToBoolean(bool condition) {
632 return condition ? true_value() : false_value();
633 }
634
635 // Code that should be run before and after each GC. Includes some
636 // reporting/verification activities when compiled with DEBUG set.
637 static void GarbageCollectionPrologue();
638 static void GarbageCollectionEpilogue();
639
640 // Code that should be executed after the garbage collection proper.
641 static void PostGarbageCollectionProcessing();
642
643 // Performs garbage collection operation.
644 // Returns whether required_space bytes are available after the collection.
645 static bool CollectGarbage(int required_space, AllocationSpace space);
646
647 // Performs a full garbage collection. Force compaction if the
648 // parameter is true.
649 static void CollectAllGarbage(bool force_compaction);
650
651 // Performs a full garbage collection if a context has been disposed
652 // since the last time the check was performed.
653 static void CollectAllGarbageIfContextDisposed();
654
655 // Notify the heap that a context has been disposed.
656 static void NotifyContextDisposed();
657
658 // Utility to invoke the scavenger. This is needed in test code to
659 // ensure correct callback for weak global handles.
660 static void PerformScavenge();
661
662#ifdef DEBUG
663 // Utility used with flag gc-greedy.
664 static bool GarbageCollectionGreedyCheck();
665#endif
666
667 static void SetGlobalGCPrologueCallback(GCCallback callback) {
668 global_gc_prologue_callback_ = callback;
669 }
670 static void SetGlobalGCEpilogueCallback(GCCallback callback) {
671 global_gc_epilogue_callback_ = callback;
672 }
673
674 // Heap root getters. We have versions with and without type::cast() here.
675 // You can't use type::cast during GC because the assert fails.
676#define ROOT_ACCESSOR(type, name, camel_name) \
677 static inline type* name() { \
678 return type::cast(roots_[k##camel_name##RootIndex]); \
679 } \
680 static inline type* raw_unchecked_##name() { \
681 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
682 }
683 ROOT_LIST(ROOT_ACCESSOR)
684#undef ROOT_ACCESSOR
685
686// Utility type maps
687#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
688 static inline Map* name##_map() { \
689 return Map::cast(roots_[k##Name##MapRootIndex]); \
690 }
691 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
692#undef STRUCT_MAP_ACCESSOR
693
694#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
695 return String::cast(roots_[k##name##RootIndex]); \
696 }
697 SYMBOL_LIST(SYMBOL_ACCESSOR)
698#undef SYMBOL_ACCESSOR
699
700 // The hidden_symbol is special because it is the empty string, but does
701 // not match the empty string.
702 static String* hidden_symbol() { return hidden_symbol_; }
703
704 // Iterates over all roots in the heap.
705 static void IterateRoots(ObjectVisitor* v);
706 // Iterates over all strong roots in the heap.
707 static void IterateStrongRoots(ObjectVisitor* v);
708
709 // Iterates remembered set of an old space.
710 static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
711
712 // Iterates a range of remembered set addresses starting with rset_start
713 // corresponding to the range of allocated pointers
714 // [object_start, object_end).
715 // Returns the number of bits that were set.
716 static int IterateRSetRange(Address object_start,
717 Address object_end,
718 Address rset_start,
719 ObjectSlotCallback copy_object_func);
720
721 // Returns whether the object resides in new space.
722 static inline bool InNewSpace(Object* object);
723 static inline bool InFromSpace(Object* object);
724 static inline bool InToSpace(Object* object);
725
726 // Checks whether an address/object in the heap (including auxiliary
727 // area and unused area).
728 static bool Contains(Address addr);
729 static bool Contains(HeapObject* value);
730
731 // Checks whether an address/object in a space.
732 // Currently used by tests and heap verification only.
733 static bool InSpace(Address addr, AllocationSpace space);
734 static bool InSpace(HeapObject* value, AllocationSpace space);
735
736 // Finds out which space an object should get promoted to based on its type.
737 static inline OldSpace* TargetSpace(HeapObject* object);
738 static inline AllocationSpace TargetSpaceId(InstanceType type);
739
740 // Sets the stub_cache_ (only used when expanding the dictionary).
741 static void public_set_code_stubs(NumberDictionary* value) {
742 roots_[kCodeStubsRootIndex] = value;
743 }
744
745 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
746 static void public_set_non_monomorphic_cache(NumberDictionary* value) {
747 roots_[kNonMonomorphicCacheRootIndex] = value;
748 }
749
750 // Update the next script id.
751 static inline void SetLastScriptId(Object* last_script_id);
752
753 // Generated code can embed this address to get access to the roots.
754 static Object** roots_address() { return roots_; }
755
756#ifdef DEBUG
757 static void Print();
758 static void PrintHandles();
759
760 // Verify the heap is in its normal state before or after a GC.
761 static void Verify();
762
763 // Report heap statistics.
764 static void ReportHeapStatistics(const char* title);
765 static void ReportCodeStatistics(const char* title);
766
767 // Fill in bogus values in from space
768 static void ZapFromSpace();
769#endif
770
771#if defined(ENABLE_LOGGING_AND_PROFILING)
772 // Print short heap statistics.
773 static void PrintShortHeapStatistics();
774#endif
775
776 // Makes a new symbol object
777 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
778 // failed.
779 // Please note this function does not perform a garbage collection.
780 static Object* CreateSymbol(const char* str, int length, int hash);
781 static Object* CreateSymbol(String* str);
782
783 // Write barrier support for address[offset] = o.
784 static inline void RecordWrite(Address address, int offset);
785
786 // Given an address occupied by a live code object, return that object.
787 static Object* FindCodeObject(Address a);
788
789 // Invoke Shrink on shrinkable spaces.
790 static void Shrink();
791
792 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
793 static inline HeapState gc_state() { return gc_state_; }
794
795#ifdef DEBUG
796 static bool IsAllocationAllowed() { return allocation_allowed_; }
797 static inline bool allow_allocation(bool enable);
798
799 static bool disallow_allocation_failure() {
800 return disallow_allocation_failure_;
801 }
802
803 static void TracePathToObject();
804 static void TracePathToGlobal();
805#endif
806
807 // Callback function passed to Heap::Iterate etc. Copies an object if
808 // necessary, the object might be promoted to an old space. The caller must
809 // ensure the precondition that the object is (a) a heap object and (b) in
810 // the heap's from space.
811 static void ScavengePointer(HeapObject** p);
812 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
813
814 // Clear a range of remembered set addresses corresponding to the object
815 // area address 'start' with size 'size_in_bytes', eg, when adding blocks
816 // to the free list.
817 static void ClearRSetRange(Address start, int size_in_bytes);
818
819 // Rebuild remembered set in old and map spaces.
820 static void RebuildRSets();
821
822 // Commits from space if it is uncommitted.
823 static void EnsureFromSpaceIsCommitted();
824
825 //
826 // Support for the API.
827 //
828
829 static bool CreateApiObjects();
830
  // Attempt to find the number in a small cache. If we find it, return
  // the string representation of the number. Otherwise return undefined.
833 static Object* GetNumberStringCache(Object* number);
834
835 // Update the cache with a new number-string pair.
836 static void SetNumberStringCache(Object* number, String* str);
837
838 // Entries in the cache. Must be a power of 2.
839 static const int kNumberStringCacheSize = 64;
840
841 // Adjusts the amount of registered external memory.
842 // Returns the adjusted value.
843 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
844
  // Allocate uninitialized fixed array (pretenure == NON_TENURE).
846 static Object* AllocateRawFixedArray(int length);
847
848 // True if we have reached the allocation limit in the old generation that
849 // should force the next GC (caused normally) to be a full one.
850 static bool OldGenerationPromotionLimitReached() {
851 return (PromotedSpaceSize() + PromotedExternalMemorySize())
852 > old_gen_promotion_limit_;
853 }
854
855 // True if we have reached the allocation limit in the old generation that
856 // should artificially cause a GC right now.
857 static bool OldGenerationAllocationLimitReached() {
858 return (PromotedSpaceSize() + PromotedExternalMemorySize())
859 > old_gen_allocation_limit_;
860 }
861
862 // Can be called when the embedding application is idle.
863 static bool IdleNotification();
864
865 // Declare all the root indices.
866 enum RootListIndex {
867#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
868 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
869#undef ROOT_INDEX_DECLARATION
870
871// Utility type maps
872#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
873 STRUCT_LIST(DECLARE_STRUCT_MAP)
874#undef DECLARE_STRUCT_MAP
875
876#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
877 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
878#undef SYMBOL_DECLARATION
879
880 kSymbolTableRootIndex,
881 kStrongRootListLength = kSymbolTableRootIndex,
882 kRootListLength
883 };
884
885 static Object* NumberToString(Object* number);
886
887 private:
888 static int semispace_size_;
889 static int initial_semispace_size_;
890 static int young_generation_size_;
891 static int old_generation_size_;
892 static size_t code_range_size_;
893
894 // For keeping track of how much data has survived
895 // scavenge since last new space expansion.
896 static int survived_since_last_expansion_;
897
898 static int always_allocate_scope_depth_;
899 static bool context_disposed_pending_;
900
901 static const int kMaxMapSpaceSize = 8*MB;
902
903#if defined(V8_TARGET_ARCH_X64)
904 static const int kMaxObjectSizeInNewSpace = 512*KB;
905#else
906 static const int kMaxObjectSizeInNewSpace = 256*KB;
907#endif
908
909 static NewSpace new_space_;
910 static OldSpace* old_pointer_space_;
911 static OldSpace* old_data_space_;
912 static OldSpace* code_space_;
913 static MapSpace* map_space_;
914 static CellSpace* cell_space_;
915 static LargeObjectSpace* lo_space_;
916 static HeapState gc_state_;
917
918 // Returns the size of object residing in non new spaces.
919 static int PromotedSpaceSize();
920
921 // Returns the amount of external memory registered since last global gc.
922 static int PromotedExternalMemorySize();
923
924 static int mc_count_; // how many mark-compact collections happened
925 static int gc_count_; // how many gc happened
926
927#define ROOT_ACCESSOR(type, name, camel_name) \
928 static inline void set_##name(type* value) { \
929 roots_[k##camel_name##RootIndex] = value; \
930 }
931 ROOT_LIST(ROOT_ACCESSOR)
932#undef ROOT_ACCESSOR
933
934#ifdef DEBUG
935 static bool allocation_allowed_;
936
937 // If the --gc-interval flag is set to a positive value, this
938 // variable holds the value indicating the number of allocations
939 // remain until the next failure and garbage collection.
940 static int allocation_timeout_;
941
942 // Do we expect to be able to handle allocation failure at this
943 // time?
944 static bool disallow_allocation_failure_;
945#endif // DEBUG
946
947 // Limit that triggers a global GC on the next (normally caused) GC. This
948 // is checked when we have already decided to do a GC to help determine
949 // which collector to invoke.
950 static int old_gen_promotion_limit_;
951
952 // Limit that triggers a global GC as soon as is reasonable. This is
953 // checked before expanding a paged space in the old generation and on
954 // every allocation in large object space.
955 static int old_gen_allocation_limit_;
956
957 // Limit on the amount of externally allocated memory allowed
958 // between global GCs. If reached a global GC is forced.
959 static int external_allocation_limit_;
960
961 // The amount of external memory registered through the API kept alive
962 // by global handles
963 static int amount_of_external_allocated_memory_;
964
965 // Caches the amount of external memory registered at the last global gc.
966 static int amount_of_external_allocated_memory_at_last_global_gc_;
967
968 // Indicates that an allocation has failed in the old generation since the
969 // last GC.
970 static int old_gen_exhausted_;
971
972 static Object* roots_[kRootListLength];
973
974 struct StringTypeTable {
975 InstanceType type;
976 int size;
977 RootListIndex index;
978 };
979
980 struct ConstantSymbolTable {
981 const char* contents;
982 RootListIndex index;
983 };
984
985 struct StructTable {
986 InstanceType type;
987 int size;
988 RootListIndex index;
989 };
990
991 static const StringTypeTable string_type_table[];
992 static const ConstantSymbolTable constant_symbol_table[];
993 static const StructTable struct_table[];
994
995 // The special hidden symbol which is an empty string, but does not match
996 // any string when looked up in properties.
997 static String* hidden_symbol_;
998
999 // GC callback function, called before and after mark-compact GC.
1000 // Allocations in the callback function are disallowed.
1001 static GCCallback global_gc_prologue_callback_;
1002 static GCCallback global_gc_epilogue_callback_;
1003
1004 // Checks whether a global GC is necessary
1005 static GarbageCollector SelectGarbageCollector(AllocationSpace space);
1006
1007 // Performs garbage collection
1008 static void PerformGarbageCollection(AllocationSpace space,
1009 GarbageCollector collector,
1010 GCTracer* tracer);
1011
1012 // Returns either a Smi or a Number object from 'value'. If 'new_object'
1013 // is false, it may return a preallocated immutable object.
1014 static Object* SmiOrNumberFromDouble(double value,
1015 bool new_object,
1016 PretenureFlag pretenure = NOT_TENURED);
1017
1018 // Allocate an uninitialized object in map space. The behavior is identical
1019 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1020 // have to test the allocation space argument and (b) can reduce code size
1021 // (since both AllocateRaw and AllocateRawMap are inlined).
1022 static inline Object* AllocateRawMap();
1023
1024 // Allocate an uninitialized object in the global property cell space.
1025 static inline Object* AllocateRawCell();
1026
1027 // Initializes a JSObject based on its map.
1028 static void InitializeJSObjectFromMap(JSObject* obj,
1029 FixedArray* properties,
1030 Map* map);
1031
1032 static bool CreateInitialMaps();
1033 static bool CreateInitialObjects();
1034
1035 // These four Create*EntryStub functions are here because of a gcc-4.4 bug
1036 // that assigns wrong vtable entries.
1037 static void CreateCEntryStub();
1038 static void CreateCEntryDebugBreakStub();
1039 static void CreateJSEntryStub();
1040 static void CreateJSConstructEntryStub();
1041 static void CreateRegExpCEntryStub();
1042
1043 static void CreateFixedStubs();
1044
1045 static Object* CreateOddball(Map* map,
1046 const char* to_string,
1047 Object* to_number);
1048
1049 // Allocate empty fixed array.
1050 static Object* AllocateEmptyFixedArray();
1051
1052 // Performs a minor collection in new generation.
1053 static void Scavenge();
1054
1055 // Performs a major collection in the whole heap.
1056 static void MarkCompact(GCTracer* tracer);
1057
1058 // Code to be run before and after mark-compact.
1059 static void MarkCompactPrologue(bool is_compacting);
1060 static void MarkCompactEpilogue(bool is_compacting);
1061
1062 // Helper function used by CopyObject to copy a source object to an
1063 // allocated target object and update the forwarding pointer in the source
1064 // object. Returns the target object.
1065 static HeapObject* MigrateObject(HeapObject* source,
1066 HeapObject* target,
1067 int size);
1068
1069 // Helper function that governs the promotion policy from new space to
1070 // old. If the object's old address lies below the new space's age
1071 // mark or if we've already filled the bottom 1/16th of the to space,
1072 // we try to promote this object.
1073 static inline bool ShouldBePromoted(Address old_address, int object_size);
1074#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1075 // Record the copy of an object in the NewSpace's statistics.
1076 static void RecordCopiedObject(HeapObject* obj);
1077
1078 // Record statistics before and after garbage collection.
1079 static void ReportStatisticsBeforeGC();
1080 static void ReportStatisticsAfterGC();
1081#endif
1082
1083 // Update an old object's remembered set
1084 static int UpdateRSet(HeapObject* obj);
1085
1086 // Rebuild remembered set in an old space.
1087 static void RebuildRSets(PagedSpace* space);
1088
1089 // Rebuild remembered set in the large object space.
1090 static void RebuildRSets(LargeObjectSpace* space);
1091
1092 // Slow part of scavenge object.
1093 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1094
1095 // Copy memory from src to dst.
1096 static inline void CopyBlock(Object** dst, Object** src, int byte_size);
1097
1098 // Initializes a function with a shared part and prototype.
1099 // Returns the function.
1100 // Note: this code was factored out of AllocateFunction such that
1101 // other parts of the VM could use it. Specifically, a function that creates
1102 // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1103 // Please note this does not perform a garbage collection.
1104 static inline Object* InitializeFunction(JSFunction* function,
1105 SharedFunctionInfo* shared,
1106 Object* prototype);
1107
1108 static const int kInitialSymbolTableSize = 2048;
1109 static const int kInitialEvalCacheSize = 64;
1110
1111 friend class Factory;
1112 friend class DisallowAllocationFailure;
1113 friend class AlwaysAllocateScope;
1114};
1115
1116
// RAII scope: while an instance is alive, Heap::always_allocate_scope_depth_
// is non-zero.  Presumably the allocator treats a non-zero depth as
// "always allocate" -- confirm against the allocation paths in heap.cc.
class AlwaysAllocateScope {
 public:
  AlwaysAllocateScope() {
    // We shouldn't hit any nested scopes, because that requires
    // non-handle code to call handle code. The code still works but
    // performance will degrade, so we want to catch this situation
    // in debug mode.
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
    Heap::always_allocate_scope_depth_++;
  }

  ~AlwaysAllocateScope() {
    Heap::always_allocate_scope_depth_--;
    ASSERT(Heap::always_allocate_scope_depth_ == 0);
  }
};
1133
1134
1135#ifdef DEBUG
1136// Visitor class to verify interior pointers that do not have remembered set
1137// bits. All heap object pointers have to point into the heap to a location
1138// that has a map pointer at its first word. Caveat: Heap::Contains is an
1139// approximation because it can return true for objects in a heap space but
1140// above the allocation pointer.
1141class VerifyPointersVisitor: public ObjectVisitor {
1142 public:
1143 void VisitPointers(Object** start, Object** end) {
1144 for (Object** current = start; current < end; current++) {
1145 if ((*current)->IsHeapObject()) {
1146 HeapObject* object = HeapObject::cast(*current);
1147 ASSERT(Heap::Contains(object));
1148 ASSERT(object->map()->IsMap());
1149 }
1150 }
1151 }
1152};
1153
1154
1155// Visitor class to verify interior pointers that have remembered set bits.
1156// As VerifyPointersVisitor but also checks that remembered set bits are
1157// always set for pointers into new space.
1158class VerifyPointersAndRSetVisitor: public ObjectVisitor {
1159 public:
1160 void VisitPointers(Object** start, Object** end) {
1161 for (Object** current = start; current < end; current++) {
1162 if ((*current)->IsHeapObject()) {
1163 HeapObject* object = HeapObject::cast(*current);
1164 ASSERT(Heap::Contains(object));
1165 ASSERT(object->map()->IsMap());
1166 if (Heap::InNewSpace(object)) {
1167 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
1168 }
1169 }
1170 }
1171 }
1172};
1173#endif
1174
1175
1176// Space iterator for iterating over all spaces of the heap.
1177// Returns each space in turn, and null when it is done.
1178class AllSpaces BASE_EMBEDDED {
1179 public:
1180 Space* next();
1181 AllSpaces() { counter_ = FIRST_SPACE; }
1182 private:
1183 int counter_;
1184};
1185
1186
1187// Space iterator for iterating over all old spaces of the heap: Old pointer
1188// space, old data space and code space.
1189// Returns each space in turn, and null when it is done.
1190class OldSpaces BASE_EMBEDDED {
1191 public:
1192 OldSpace* next();
1193 OldSpaces() { counter_ = OLD_POINTER_SPACE; }
1194 private:
1195 int counter_;
1196};
1197
1198
1199// Space iterator for iterating over all the paged spaces of the heap:
1200// Map space, old pointer space, old data space and code space.
1201// Returns each space in turn, and null when it is done.
1202class PagedSpaces BASE_EMBEDDED {
1203 public:
1204 PagedSpace* next();
1205 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }
1206 private:
1207 int counter_;
1208};
1209
1210
1211// Space iterator for iterating over all spaces of the heap.
1212// For each space an object iterator is provided. The deallocation of the
1213// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  SpaceIterator();
  virtual ~SpaceIterator();

  // Returns true while there are spaces left to visit.
  bool has_next();
  // Returns an object iterator for the next space; ownership stays with
  // this SpaceIterator (see class comment above).
  ObjectIterator* next();

 private:
  // Creates the object iterator for the space selected by current_space_.
  ObjectIterator* CreateIterator();

  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
1228
1229
// A HeapIterator provides iteration over the whole heap. It aggregates the
// specific iterators for the different spaces, as each of those can only
// iterate over a single space.
1233
1234class HeapIterator BASE_EMBEDDED {
1235 public:
1236 explicit HeapIterator();
1237 virtual ~HeapIterator();
1238
1239 bool has_next();
1240 HeapObject* next();
1241 void reset();
1242
1243 private:
1244 // Perform the initialization.
1245 void Init();
1246
1247 // Perform all necessary shutdown (destruction) work.
1248 void Shutdown();
1249
1250 // Space iterator for iterating all the spaces.
1251 SpaceIterator* space_iterator_;
1252 // Object iterator for the space currently being iterated.
1253 ObjectIterator* object_iterator_;
1254};
1255
1256
1257// Cache for mapping (map, property name) into field offset.
1258// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  static int Lookup(Map* map, String* name);

  // Update an element in the cache.
  static void Update(Map* map, String* name, int field_offset);

  // Clear the cache.
  static void Clear();
 private:
  // Hash of (map, name) used to index keys_ and field_offsets_.
  static inline int Hash(Map* map, String* name);
  // Number of entries; keys_ and field_offsets_ are parallel arrays.
  static const int kLength = 64;
  struct Key {
    Map* map;
    String* name;
  };
  static Key keys_[kLength];
  static int field_offsets_[kLength];
};
1279
1280
1281
1282// Cache for mapping (array, property name) into descriptor index.
1283// The cache contains both positive and negative results.
1284// Descriptor index equals kNotFound means the property is absent.
1285// Cleared at startup and prior to any gc.
1286class DescriptorLookupCache {
1287 public:
1288 // Lookup descriptor index for (map, name).
1289 // If absent, kAbsent is returned.
1290 static int Lookup(DescriptorArray* array, String* name) {
1291 if (!StringShape(name).IsSymbol()) return kAbsent;
1292 int index = Hash(array, name);
1293 Key& key = keys_[index];
1294 if ((key.array == array) && (key.name == name)) return results_[index];
1295 return kAbsent;
1296 }
1297
1298 // Update an element in the cache.
1299 static void Update(DescriptorArray* array, String* name, int result) {
1300 ASSERT(result != kAbsent);
1301 if (StringShape(name).IsSymbol()) {
1302 int index = Hash(array, name);
1303 Key& key = keys_[index];
1304 key.array = array;
1305 key.name = name;
1306 results_[index] = result;
1307 }
1308 }
1309
1310 // Clear the cache.
1311 static void Clear();
1312
1313 static const int kAbsent = -2;
1314 private:
1315 static int Hash(DescriptorArray* array, String* name) {
1316 // Uses only lower 32 bits if pointers are larger.
1317 uintptr_t array_hash =
1318 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1319 uintptr_t name_hash =
1320 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1321 return (array_hash ^ name_hash) % kLength;
1322 }
1323
1324 static const int kLength = 64;
1325 struct Key {
1326 DescriptorArray* array;
1327 String* name;
1328 };
1329
1330 static Key keys_[kLength];
1331 static int results_[kLength];
1332};
1333
1334
1335// ----------------------------------------------------------------------------
1336// Marking stack for tracing live objects.
1337
1338class MarkingStack {
1339 public:
1340 void Initialize(Address low, Address high) {
1341 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1342 high_ = reinterpret_cast<HeapObject**>(high);
1343 overflowed_ = false;
1344 }
1345
1346 bool is_full() { return top_ >= high_; }
1347
1348 bool is_empty() { return top_ <= low_; }
1349
1350 bool overflowed() { return overflowed_; }
1351
1352 void clear_overflowed() { overflowed_ = false; }
1353
1354 // Push the (marked) object on the marking stack if there is room,
1355 // otherwise mark the object as overflowed and wait for a rescan of the
1356 // heap.
1357 void Push(HeapObject* object) {
1358 CHECK(object->IsHeapObject());
1359 if (is_full()) {
1360 object->SetOverflow();
1361 overflowed_ = true;
1362 } else {
1363 *(top_++) = object;
1364 }
1365 }
1366
1367 HeapObject* Pop() {
1368 ASSERT(!is_empty());
1369 HeapObject* object = *(--top_);
1370 CHECK(object->IsHeapObject());
1371 return object;
1372 }
1373
1374 private:
1375 HeapObject** low_;
1376 HeapObject** top_;
1377 HeapObject** high_;
1378 bool overflowed_;
1379};
1380
1381
1382// A helper class to document/test C++ scopes where we do not
1383// expect a GC. Usage:
1384//
1385// /* Allocation not allowed: we cannot handle a GC in this scope. */
1386// { AssertNoAllocation nogc;
1387// ...
1388// }
1389
1390#ifdef DEBUG
1391
1392class DisallowAllocationFailure {
1393 public:
1394 DisallowAllocationFailure() {
1395 old_state_ = Heap::disallow_allocation_failure_;
1396 Heap::disallow_allocation_failure_ = true;
1397 }
1398 ~DisallowAllocationFailure() {
1399 Heap::disallow_allocation_failure_ = old_state_;
1400 }
1401 private:
1402 bool old_state_;
1403};
1404
1405class AssertNoAllocation {
1406 public:
1407 AssertNoAllocation() {
1408 old_state_ = Heap::allow_allocation(false);
1409 }
1410
1411 ~AssertNoAllocation() {
1412 Heap::allow_allocation(old_state_);
1413 }
1414
1415 private:
1416 bool old_state_;
1417};
1418
1419class DisableAssertNoAllocation {
1420 public:
1421 DisableAssertNoAllocation() {
1422 old_state_ = Heap::allow_allocation(true);
1423 }
1424
1425 ~DisableAssertNoAllocation() {
1426 Heap::allow_allocation(old_state_);
1427 }
1428
1429 private:
1430 bool old_state_;
1431};
1432
1433#else // ndef DEBUG
1434
// Release-mode stand-in: a no-op, since the allocation-tracking state only
// exists in debug builds.
class AssertNoAllocation {
 public:
  AssertNoAllocation() { }
  ~AssertNoAllocation() { }
};
1440
// Release-mode stand-in: a no-op, matching the debug-only class above.
class DisableAssertNoAllocation {
 public:
  DisableAssertNoAllocation() { }
  ~DisableAssertNoAllocation() { }
};
1446
1447#endif
1448
1449// GCTracer collects and prints ONE line after each garbage collector
1450// invocation IFF --trace_gc is used.
1451
class GCTracer BASE_EMBEDDED {
 public:
  // Records the start time and heap size (see start_time_, start_size_).
  GCTracer();

  // Prints the one-line trace for this collection (--trace_gc).
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  // Sets the flag that this is a compacting full GC.
  void set_is_compacting() { is_compacting_ = true; }

  // Increment and decrement the count of marked objects.
  void increment_marked_count() { ++marked_count_; }
  void decrement_marked_count() { --marked_count_; }

  int marked_count() { return marked_count_; }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns size of object in heap (in MB).
  double SizeOfHeapObjects() {
    return (static_cast<double>(Heap::SizeOfObjects())) / MB;
  }

  double start_time_;  // Timestamp set in the constructor.
  double start_size_;  // Size of objects in heap set in constructor.
  GarbageCollector collector_;  // Type of collector.

  // A count (including this one, eg, the first collection is 1) of the
  // number of garbage collections.
  int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // True if the current GC is a compacting full collection, false
  // otherwise.
  bool is_compacting_;

  // True if the *previous* full GC was a compacting collection (will be
  // false if there has not been a previous full GC).
  bool previous_has_compacted_;

  // On a full GC, a count of the number of marked objects. Incremented
  // when an object is marked and decremented when an object's mark bit is
  // cleared. Will be zero on a scavenge collection.
  int marked_count_;

  // The count from the end of the previous full GC. Will be zero if there
  // was no previous full GC.
  int previous_marked_count_;
};
1513
1514
// Per-function memo cache for transcendental math results.  Keys are the raw
// 64-bit pattern of the double input, so +0.0/-0.0 and distinct NaN payloads
// are distinct keys.
class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};

  explicit TranscendentalCache(Type t);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  static inline Object* Get(Type type, double input) {
    TranscendentalCache* cache = caches_[type];
    if (cache == NULL) {
      // Caches are created lazily on first use.
      caches_[type] = cache = new TranscendentalCache(type);
    }
    return cache->Get(input);
  }

  // The cache contains raw Object pointers. This method disposes of
  // them before a garbage collection.
  static void Clear();

 private:
  // Looks up the bit pattern of 'input'; on a miss, computes the value and
  // allocates a heap number.  The result is cached only if allocation
  // succeeded, and may be a Failure object when it did not.
  inline Object* Get(double input) {
    Converter c;
    c.dbl = input;
    int hash = Hash(c);
    Element e = elements_[hash];
    if (e.in[0] == c.integers[0] &&
        e.in[1] == c.integers[1]) {
      ASSERT(e.output != NULL);
      return e.output;
    }
    double answer = Calculate(input);
    Object* heap_number = Heap::AllocateHeapNumber(answer);
    if (!heap_number->IsFailure()) {
      elements_[hash].in[0] = c.integers[0];
      elements_[hash].in[1] = c.integers[1];
      elements_[hash].output = heap_number;
    }
    return heap_number;
  }

  // Applies the math function selected by type_ to 'input'.
  inline double Calculate(double input) {
    switch (type_) {
      case ACOS:
        return acos(input);
      case ASIN:
        return asin(input);
      case ATAN:
        return atan(input);
      case COS:
        return cos(input);
      case EXP:
        return exp(input);
      case LOG:
        return log(input);
      case SIN:
        return sin(input);
      case TAN:
        return tan(input);
      default:
        return 0.0;  // Never happens.
    }
  }
  // Number of cache slots; must stay a power of 2 for the Hash mask below.
  static const int kCacheSize = 512;
  struct Element {
    uint32_t in[2];   // The two 32-bit halves of the input's bit pattern.
    Object* output;   // Cached heap number; raw pointer, cleared before GC.
  };
  // Type-puns a double into two 32-bit integers for hashing/comparison.
  union Converter {
    double dbl;
    uint32_t integers[2];
  };
  inline static int Hash(const Converter& c) {
    uint32_t hash = (c.integers[0] ^ c.integers[1]);
    hash ^= hash >> 16;
    hash ^= hash >> 8;
    return (hash & (kCacheSize - 1));
  }
  static TranscendentalCache* caches_[kNumberOfCaches];
  Element elements_[kCacheSize];
  Type type_;
};
1597
1598
1599} } // namespace v8::internal
1600
1601#endif // V8_HEAP_H_