blob: 726887a23a6ee785167817c42343f789eb4b5973 [file] [log] [blame]
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include <stdlib.h>
29#include <utility>
30
31#include "src/compilation-cache.h"
32#include "src/context-measure.h"
33#include "src/deoptimizer.h"
34#include "src/execution.h"
35#include "src/factory.h"
36#include "src/global-handles.h"
37#include "src/heap/gc-tracer.h"
38#include "src/heap/memory-reducer.h"
39#include "src/ic/ic.h"
40#include "src/macro-assembler.h"
41#include "src/regexp/jsregexp.h"
42#include "src/snapshot/snapshot.h"
43#include "test/cctest/cctest.h"
44#include "test/cctest/heap/heap-tester.h"
45#include "test/cctest/heap/utils-inl.h"
46#include "test/cctest/test-feedback-vector.h"
47
48
49namespace v8 {
50namespace internal {
51
52static void CheckMap(Map* map, int type, int instance_size) {
53 CHECK(map->IsHeapObject());
54#ifdef DEBUG
55 CHECK(CcTest::heap()->Contains(map));
56#endif
57 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
58 CHECK_EQ(type, map->instance_type());
59 CHECK_EQ(instance_size, map->instance_size());
60}
61
62
// Checks the canonical maps in the heap's root list: the meta map, the heap
// number map, every SIMD128 value map, and the variable-size fixed-array and
// string maps (which use kVariableSizeSentinel instead of a fixed size).
TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
// Instantiate the map check for every SIMD128 type known to the heap.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}
75
76
77static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
78 CHECK(obj->IsOddball());
79 Handle<Object> handle(obj, isolate);
80 Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
81 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
82}
83
84
85static void CheckSmi(Isolate* isolate, int value, const char* string) {
86 Handle<Object> handle(Smi::FromInt(value), isolate);
87 Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
88 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
89}
90
91
92static void CheckNumber(Isolate* isolate, double value, const char* string) {
93 Handle<Object> number = isolate->factory()->NewNumber(value);
94 CHECK(number->IsNumber());
95 Handle<Object> print_string =
96 Object::ToString(isolate, number).ToHandleChecked();
97 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
98}
99
100
101static void CheckFindCodeObject(Isolate* isolate) {
102 // Test FindCodeObject
103#define __ assm.
104
105 Assembler assm(isolate, NULL, 0);
106
107 __ nop(); // supported on all architectures
108
109 CodeDesc desc;
110 assm.GetCode(&desc);
111 Handle<Code> code = isolate->factory()->NewCode(
112 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
113 CHECK(code->IsCode());
114
115 HeapObject* obj = HeapObject::cast(*code);
116 Address obj_addr = obj->address();
117
118 for (int i = 0; i < obj->Size(); i += kPointerSize) {
119 Object* found = isolate->FindCodeObject(obj_addr + i);
120 CHECK_EQ(*code, found);
121 }
122
123 Handle<Code> copy = isolate->factory()->NewCode(
124 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
125 HeapObject* obj_copy = HeapObject::cast(*copy);
126 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
127 obj_copy->Size() / 2);
128 CHECK(not_right != *code);
129}
130
131
132TEST(HandleNull) {
133 CcTest::InitializeVM();
134 Isolate* isolate = CcTest::i_isolate();
135 HandleScope outer_scope(isolate);
136 LocalContext context;
137 Handle<Object> n(static_cast<Object*>(nullptr), isolate);
138 CHECK(!n.is_null());
139}
140
141
// Exercises basic number and string allocation through the Factory, checks
// the Smi vs. HeapNumber representation boundaries, and verifies ToString
// output for oddballs, Smis, and doubles.
TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());  // Non-integral doubles are heap numbers.
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());  // Small integral doubles are represented as Smis.
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  // Both Smi range endpoints must still be Smis.
  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  // Values just above the Smi range must be heap numbers.
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  // The global object must expose the "Object" property.
  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}
223
224
// Checks a SIMD value against |lane_values|, then verifies that each lane can
// be independently set to |other_value| without disturbing its neighbors.
template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
                           LANE_TYPE other_value) {
  // All lanes initially hold their expected values.
  for (int lane = 0; lane < LANES; lane++) {
    CHECK_EQ(lane_values[lane], value->get_lane(lane));
  }
  // Overwriting one lane must leave every other lane untouched.
  for (int lane = 0; lane < LANES; lane++) {
    value->set_lane(lane, other_value);  // change the value
    for (int other = 0; other < LANES; other++) {
      LANE_TYPE expected = (other == lane) ? other_value : lane_values[other];
      CHECK_EQ(expected, value->get_lane(other));
    }
    value->set_lane(lane, lane_values[lane]);  // restore the lane
  }
  CHECK(value->BooleanValue());  // SIMD values are 'true'.
}
245
246
// Allocates every SIMD128 wrapper type through the Factory and verifies lane
// get/set round-trips via CheckSimdValue. Float lanes additionally check
// signed-zero and NaN preservation; with OBJECT_PRINT enabled, the printed
// representation of each value is checked as well.
TEST(SimdObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  // Float32x4
  {
    float lanes[4] = {1, 2, 3, 4};
    float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
    float signaling_NaN = std::numeric_limits<float>::signaling_NaN();

    Handle<Float32x4> value = factory->NewFloat32x4(lanes);
    CHECK(value->IsFloat32x4());
    CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);

    // Check special lane values.
    value->set_lane(1, -0.0);
    CHECK_EQ(-0.0f, value->get_lane(1));
    CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
    value->set_lane(2, quiet_NaN);
    CHECK(std::isnan(value->get_lane(2)));
    value->set_lane(3, signaling_NaN);
    CHECK(std::isnan(value->get_lane(3)));

#ifdef OBJECT_PRINT
    // Check value printing.
    {
      value = factory->NewFloat32x4(lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      CHECK_EQ("1, 2, 3, 4", os.str());
    }
    {
      float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
      value = factory->NewFloat32x4(special_lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      // Value printing doesn't preserve signed zeroes.
      CHECK_EQ("0, 0, NaN, NaN", os.str());
    }
#endif  // OBJECT_PRINT
  }
  // Int32x4
  {
    int32_t lanes[4] = {1, 2, 3, 4};

    Handle<Int32x4> value = factory->NewInt32x4(lanes);
    CHECK(value->IsInt32x4());
    CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint32x4
  {
    uint32_t lanes[4] = {1, 2, 3, 4};

    Handle<Uint32x4> value = factory->NewUint32x4(lanes);
    CHECK(value->IsUint32x4());
    CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool32x4
  {
    bool lanes[4] = {true, false, true, false};

    Handle<Bool32x4> value = factory->NewBool32x4(lanes);
    CHECK(value->IsBool32x4());
    CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool32x4Print(os);
    CHECK_EQ("true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int16x8
  {
    int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Int16x8> value = factory->NewInt16x8(lanes);
    CHECK(value->IsInt16x8());
    CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint16x8
  {
    uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Uint16x8> value = factory->NewUint16x8(lanes);
    CHECK(value->IsUint16x8());
    CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool16x8
  {
    bool lanes[8] = {true, false, true, false, true, false, true, false};

    Handle<Bool16x8> value = factory->NewBool16x8(lanes);
    CHECK(value->IsBool16x8());
    CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool16x8Print(os);
    CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int8x16
  {
    int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Int8x16> value = factory->NewInt8x16(lanes);
    CHECK(value->IsInt8x16());
    CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint8x16
  {
    uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Uint8x16> value = factory->NewUint8x16(lanes);
    CHECK(value->IsUint8x16());
    CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool8x16
  {
    bool lanes[16] = {true, false, true, false, true, false, true, false,
                      true, false, true, false, true, false, true, false};

    Handle<Bool8x16> value = factory->NewBool8x16(lanes);
    CHECK(value->IsBool8x16());
    CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool8x16Print(os);
    CHECK_EQ(
        "true, false, true, false, true, false, true, false, true, false, "
        "true, false, true, false, true, false",
        os.str());
#endif  // OBJECT_PRINT
  }
}
422
423
424TEST(Tagging) {
425 CcTest::InitializeVM();
426 int request = 24;
427 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
428 CHECK(Smi::FromInt(42)->IsSmi());
429 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
430 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
431}
432
433
// Allocates objects rooted (and unrooted) through global-object properties
// and verifies that a new-space GC retains exactly the reachable ones.
// NOTE: the inner HandleScopes are intentional — objects that are only held
// by handles in a closed scope become unrooted before the next GC.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object.  Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}
497
498
499static void VerifyStringAllocation(Isolate* isolate, const char* string) {
500 HandleScope scope(isolate);
501 Handle<String> s = isolate->factory()->NewStringFromUtf8(
502 CStrVector(string)).ToHandleChecked();
503 CHECK_EQ(StrLength(string), s->length());
504 for (int index = 0; index < s->length(); index++) {
505 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
506 }
507}
508
509
510TEST(String) {
511 CcTest::InitializeVM();
512 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
513
514 VerifyStringAllocation(isolate, "a");
515 VerifyStringAllocation(isolate, "ab");
516 VerifyStringAllocation(isolate, "abc");
517 VerifyStringAllocation(isolate, "abcd");
518 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
519}
520
521
522TEST(LocalHandles) {
523 CcTest::InitializeVM();
524 Isolate* isolate = CcTest::i_isolate();
525 Factory* factory = isolate->factory();
526
527 v8::HandleScope scope(CcTest::isolate());
528 const char* name = "Kasper the spunky";
529 Handle<String> string = factory->NewStringFromAsciiChecked(name);
530 CHECK_EQ(StrLength(name), string->length());
531}
532
533
// Strong global handles must keep their targets alive across a scavenge even
// after every local handle scope has closed, and two handles created for the
// same object must keep aliasing it.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    // Two global handles per object; the locals die with this scope.
    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  // Handles created for the same object still alias it after GC.
  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}
574
575
// Set by TestWeakGlobalHandleCallback when a weak handle whose parameter id
// is 1234 has been cleared by the GC; reset by each test before use.
static bool WeakPointerCleared = false;

// Weak callback for the tests below: records that the weak pointer was
// cleared (when the id in the parameter pair matches) and resets the
// persistent handle so it can be reclaimed.
static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}
586
587
// A scavenge (new-space GC) treats weak global handles as strong roots, so a
// weakened handle must NOT be cleared by it.
TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Weaken h2; the callback fires only if the GC actually clears it.
  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}
629
630
// A full mark-compact GC must clear an otherwise-unreachable weak global
// handle (and fire its callback) while leaving strong handles alone.
TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage();

  CHECK((*h1)->IsString());

  // The weak handle's target was collected; the strong one survived.
  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}
675
676
// A weak global handle survives a scavenge (which treats it as strong) but
// is cleared by a full mark-compact collection.
TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scanvenge does not recognize weak reference.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  heap->CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}
711
712
713TEST(BytecodeArray) {
714 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
715 static const int kRawBytesSize = sizeof(kRawBytes);
716 static const int kFrameSize = 32;
717 static const int kParameterCount = 2;
718
719 i::FLAG_manual_evacuation_candidates_selection = true;
720 CcTest::InitializeVM();
721 Isolate* isolate = CcTest::i_isolate();
722 Heap* heap = isolate->heap();
723 Factory* factory = isolate->factory();
724 HandleScope scope(isolate);
725
726 SimulateFullSpace(heap->old_space());
727 Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
728 for (int i = 0; i < 5; i++) {
729 Handle<Object> number = factory->NewHeapNumber(i);
730 constant_pool->set(i, *number);
731 }
732
733 // Allocate and initialize BytecodeArray
734 Handle<BytecodeArray> array = factory->NewBytecodeArray(
735 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
736
737 CHECK(array->IsBytecodeArray());
738 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
739 CHECK_EQ(array->frame_size(), kFrameSize);
740 CHECK_EQ(array->parameter_count(), kParameterCount);
741 CHECK_EQ(array->constant_pool(), *constant_pool);
742 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
743 CHECK_GE(array->address() + array->BytecodeArraySize(),
744 array->GetFirstBytecodeAddress() + array->length());
745 for (int i = 0; i < kRawBytesSize; i++) {
746 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
747 CHECK_EQ(array->get(i), kRawBytes[i]);
748 }
749
750 FixedArray* old_constant_pool_address = *constant_pool;
751
752 // Perform a full garbage collection and force the constant pool to be on an
753 // evacuation candidate.
754 Page* evac_page = Page::FromAddress(constant_pool->address());
755 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
756 heap->CollectAllGarbage();
757
758 // BytecodeArray should survive.
759 CHECK_EQ(array->length(), kRawBytesSize);
760 CHECK_EQ(array->frame_size(), kFrameSize);
761 for (int i = 0; i < kRawBytesSize; i++) {
762 CHECK_EQ(array->get(i), kRawBytes[i]);
763 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
764 }
765
766 // Constant pool should have been migrated.
767 CHECK_EQ(array->constant_pool(), *constant_pool);
768 CHECK_NE(array->constant_pool(), old_constant_pool_address);
769}
770
771
772static const char* not_so_random_string_table[] = {
773 "abstract",
774 "boolean",
775 "break",
776 "byte",
777 "case",
778 "catch",
779 "char",
780 "class",
781 "const",
782 "continue",
783 "debugger",
784 "default",
785 "delete",
786 "do",
787 "double",
788 "else",
789 "enum",
790 "export",
791 "extends",
792 "false",
793 "final",
794 "finally",
795 "float",
796 "for",
797 "function",
798 "goto",
799 "if",
800 "implements",
801 "import",
802 "in",
803 "instanceof",
804 "int",
805 "interface",
806 "long",
807 "native",
808 "new",
809 "null",
810 "package",
811 "private",
812 "protected",
813 "public",
814 "return",
815 "short",
816 "static",
817 "super",
818 "switch",
819 "synchronized",
820 "this",
821 "throw",
822 "throws",
823 "transient",
824 "true",
825 "try",
826 "typeof",
827 "var",
828 "void",
829 "volatile",
830 "while",
831 "with",
832 0
833};
834
835
836static void CheckInternalizedStrings(const char** strings) {
837 Isolate* isolate = CcTest::i_isolate();
838 Factory* factory = isolate->factory();
839 for (const char* string = *strings; *strings != 0; string = *strings++) {
840 HandleScope scope(isolate);
841 Handle<String> a =
842 isolate->factory()->InternalizeUtf8String(CStrVector(string));
843 // InternalizeUtf8String may return a failure if a GC is needed.
844 CHECK(a->IsInternalizedString());
845 Handle<String> b = factory->InternalizeUtf8String(string);
846 CHECK_EQ(*b, *a);
847 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
848 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
849 CHECK_EQ(*b, *a);
850 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
851 }
852}
853
854
855TEST(StringTable) {
856 CcTest::InitializeVM();
857
858 v8::HandleScope sc(CcTest::isolate());
859 CheckInternalizedStrings(not_so_random_string_table);
860 CheckInternalizedStrings(not_so_random_string_table);
861}
862
863
// Allocates a function and an instance of it, and checks that properties can
// be added both to the instance and to the function object itself.
TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}
886
887
// Exercises add/has/delete property cycles on a plain JS object, in both
// insertion orders, and checks that plain strings and internalized strings
// referring to the same text resolve to the same property.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}
956
957
// Adding a property to a fresh instance must transition the object to a new
// map, leaving the function's initial map unchanged.
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}
980
981
982TEST(JSArray) {
983 CcTest::InitializeVM();
984 Isolate* isolate = CcTest::i_isolate();
985 Factory* factory = isolate->factory();
986
987 v8::HandleScope sc(CcTest::isolate());
988 Handle<String> name = factory->InternalizeUtf8String("Array");
989 Handle<Object> fun_obj = Object::GetProperty(
990 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
991 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
992
993 // Allocate the object.
994 Handle<Object> element;
995 Handle<JSObject> object = factory->NewJSObject(function);
996 Handle<JSArray> array = Handle<JSArray>::cast(object);
997 // We just initialized the VM, no heap allocation failure yet.
998 JSArray::Initialize(array, 0);
999
1000 // Set array length to 0.
1001 JSArray::SetLength(array, 0);
1002 CHECK_EQ(Smi::FromInt(0), array->length());
1003 // Must be in fast mode.
1004 CHECK(array->HasFastSmiOrObjectElements());
1005
1006 // array[length] = name.
1007 JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
1008 CHECK_EQ(Smi::FromInt(1), array->length());
1009 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
1010 CHECK_EQ(*element, *name);
1011
1012 // Set array length with larger than smi value.
1013 JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
1014
1015 uint32_t int_length = 0;
1016 CHECK(array->length()->ToArrayIndex(&int_length));
1017 CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
1018 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
1019
1020 // array[length] = name.
1021 JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
1022 uint32_t new_int_length = 0;
1023 CHECK(array->length()->ToArrayIndex(&new_int_length));
1024 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
1025 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
1026 CHECK_EQ(*element, *name);
1027 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
1028 CHECK_EQ(*element, *name);
1029}
1030
1031
// Tests Factory::CopyJSObject: the clone must be a distinct object whose
// named properties and elements initially match the original, and mutations
// of the clone must not be visible through the original.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Populate the original with two named properties and two elements.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
  JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  // The clone starts out with the same elements as the original ...
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // ... and the same named properties.
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values on the clone only.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
  JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();

  // The original's elements are unchanged: each matches the clone's
  // swapped counterpart.
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Same for the named properties.
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}
1095
1096
1097TEST(StringAllocation) {
1098 CcTest::InitializeVM();
1099 Isolate* isolate = CcTest::i_isolate();
1100 Factory* factory = isolate->factory();
1101
1102 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
1103 for (int length = 0; length < 100; length++) {
1104 v8::HandleScope scope(CcTest::isolate());
1105 char* non_one_byte = NewArray<char>(3 * length + 1);
1106 char* one_byte = NewArray<char>(length + 1);
1107 non_one_byte[3 * length] = 0;
1108 one_byte[length] = 0;
1109 for (int i = 0; i < length; i++) {
1110 one_byte[i] = 'a';
1111 non_one_byte[3 * i] = chars[0];
1112 non_one_byte[3 * i + 1] = chars[1];
1113 non_one_byte[3 * i + 2] = chars[2];
1114 }
1115 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1116 Vector<const char>(non_one_byte, 3 * length));
1117 CHECK_EQ(length, non_one_byte_sym->length());
1118 Handle<String> one_byte_sym =
1119 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1120 CHECK_EQ(length, one_byte_sym->length());
1121 Handle<String> non_one_byte_str =
1122 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1123 .ToHandleChecked();
1124 non_one_byte_str->Hash();
1125 CHECK_EQ(length, non_one_byte_str->length());
1126 Handle<String> one_byte_str =
1127 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1128 .ToHandleChecked();
1129 one_byte_str->Hash();
1130 CHECK_EQ(length, one_byte_str->length());
1131 DeleteArray(non_one_byte);
1132 DeleteArray(one_byte);
1133 }
1134}
1135
1136
1137static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1138 // Count the number of objects found in the heap.
1139 int found_count = 0;
1140 HeapIterator iterator(heap);
1141 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1142 for (int i = 0; i < size; i++) {
1143 if (*objs[i] == obj) {
1144 found_count++;
1145 }
1146 }
1147 }
1148 return found_count;
1149}
1150
1151
// Allocates objects into several spaces (new, old, large-object) plus a map,
// then checks that a full HeapIterator walk visits every one of them.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space): one byte more than the
  // regular-page limit forces the allocation into LO_SPACE.
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
1187
1188
// Verifies basic code flushing: after enough full GCs the unoptimized code
// of an unused function is discarded, and calling the function afterwards
// recompiles it. Runs in a fresh isolate so flag settings take effect before
// any code is compiled.
UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo's code should now have been flushed (unless it was optimized,
    // in which case flushing does not apply).
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}
1247
1248
// Verifies that with --optimize-for-size, code that has only run once starts
// out pre-aged and is flushed after a single GC, while executing the function
// again resets its age so it survives further collections.
TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GC now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo's code should now have been flushed (unless it was optimized).
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1312
1313
// Verifies code flushing driven by incremental marking, and that optimizing
// a function while it is enqueued as a flushing candidate does not corrupt
// the candidate queue.
TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate
  // (a non-undefined next_function_link signals it is on the list).
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1381
1382
// Verifies that a scavenge performed while incremental marking is running
// does not corrupt the code-flushing candidate queue, even when one of the
// enqueued functions dies during the scavenge.
TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions (by nulling
  // out its handle slot directly). Finally perform a scavenge while
  // incremental marking is still running.
  SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}
1450
1451
// Verifies that aborting incremental marking (here: by enabling the debugger
// and setting a breakpoint mid-mark, which disables code flushing) leaves the
// code-flushing candidate queue in a sane state.
TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  EnableDebugger(CcTest::isolate());
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
  DisableDebugger(CcTest::isolate());

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1517
1518
// Verifies compilation-cache caching behavior for scripts: the first compile
// only records a hash, the second inserts a real entry, code aging evicts the
// entry, and pre-aging the cache generations prevents caching altogether.
TEST(CompilationCacheCachingBehavior) {
  // If we do not flush code, or have the compilation cache turned off, this
  // test is invalid.
  if (!FLAG_flush_code || !FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode =
      construct_language_mode(FLAG_use_strict, FLAG_use_strong);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache. We can't
  // find that value.
  MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On second compilation, the hash is replaced by a real cache entry mapping
  // the source to the shared function info containing the code.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  // Check that the code cache entry survives at least one GC.
  // (Unless --optimize-for-size, in which case it might get collected
  // immediately.)
  if (!FLAG_optimize_for_size) {
    heap->CollectAllGarbage();
    info = compilation_cache->LookupScript(
        source, Handle<Object>(), 0, 0,
        v8::ScriptOriginOptions(false, true, false), native_context,
        language_mode);
    CHECK(!info.is_null());
  }

  // Progress code age until it's old and ready for GC.
  while (!info.ToHandleChecked()->code()->IsOld()) {
    // To guarantee progress, we have to MakeOlder with different parities.
    // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
    // always NO_MARKING_PARITY and the code age only progresses if the parity
    // is different.
    info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
    info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
  }

  heap->CollectAllGarbage();
  // Ensure code aging cleared the entry from the cache.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache. We can't
  // find that value.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  // Age the cache through all its hash generations.
  for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
    compilation_cache->MarkCompactPrologue();
  }

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // If we aged the cache before caching the script, ensure that we didn't cache
  // on next compilation.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());
}
1630
1631
// Defines an empty function called |name| in the current context, warms it
// up with two calls, then forces optimization via the natives-syntax
// %OptimizeFunctionOnNextCall intrinsic and calls it once more.
static void OptimizeEmptyFunction(const char* name) {
  HandleScope scope(CcTest::i_isolate());
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();",
           name, name, name, name, name);
  CompileRun(source.start());
}
1643
1644
1645// Count the number of native contexts in the weak list of native contexts.
1646int CountNativeContexts() {
1647 int count = 0;
1648 Object* object = CcTest::heap()->native_contexts_list();
1649 while (!object->IsUndefined()) {
1650 count++;
1651 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1652 }
1653 return count;
1654}
1655
1656
1657// Count the number of user functions in the weak list of optimized
1658// functions attached to a native context.
1659static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
1660 int count = 0;
1661 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1662 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1663 while (object->IsJSFunction() &&
1664 !JSFunction::cast(object)->shared()->IsBuiltin()) {
1665 count++;
1666 object = JSFunction::cast(object)->next_function_link();
1667 }
1668 return count;
1669}
1670
1671
// Exercises the internal weak lists of native contexts and of optimized
// functions: scavenges must treat the links as strong, while full mark-sweep
// collections must drop dead entries.
TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f1");
    CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f2");
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f3");
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f4");
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f5");
    CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));

    // Remove function f1 by clearing its only JS reference.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::heap()->CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1777
1778
// Count the number of native contexts in the weak list of native contexts,
// causing a full GC once the walk reaches the specified element. Uses
// Handle<Object> (not raw pointers) so the walk survives the collection.
static int CountNativeContextsWithGC(Isolate* isolate, int n) {
  Heap* heap = isolate->heap();
  int count = 0;
  Handle<Object> object(heap->native_contexts_list(), isolate);
  while (!object->IsUndefined()) {
    count++;
    if (count == n) heap->CollectAllGarbage();
    object =
        Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
                       isolate);
  }
  return count;
}
1794
1795
// Count the number of user (non-builtin) functions in the weak list of
// optimized functions attached to a native context, causing a full GC once
// the walk reaches the specified element. Uses handles so the walk survives
// the collection.
static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
                                             int n) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Isolate* isolate = icontext->GetIsolate();
  Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
                        isolate);
  while (object->IsJSFunction() &&
         !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
    count++;
    if (count == n) isolate->heap()->CollectAllGarbage();
    object = Handle<Object>(
        Object::cast(JSFunction::cast(*object)->next_function_link()),
        isolate);
  }
  return count;
}
1816
1817
// Verifies that traversing the internal weak lists (native contexts and
// optimized functions) yields the same counts whether or not a GC is
// triggered mid-traversal.
TEST(TestInternalWeakListsTraverseWithGC) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  ctx[0]->Enter();

  // Compile a number of functions and check the length of the weak list of
  // optimized functions both with and without GCs while iterating the list.
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  OptimizeEmptyFunction("f1");
  CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f2");
  CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f3");
  CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f4");
  CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  OptimizeEmptyFunction("f5");
  CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}
1863
1864
// Measures the code size generated for a regexp just over the
// too-large-to-optimize threshold versus one at half that size, and checks
// that the optimizing regexp compiler produces substantially more code for
// the smaller (optimizable) regexp.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile the large regexp and measure the resulting heap growth.
  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile the half-size regexp on top of that and measure again.
  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1925
1926
// Checks that Heap::SizeOfObjects tracks allocations exactly and returns to
// its initial value after a full GC, even while concurrent sweeping may
// still be in progress.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
      // Each allocation must be reflected in SizeOfObjects immediately.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::heap()->CollectAllGarbage();

  // Normally sweeping would not be complete here, but no guarantees.

  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));

  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
}
1970
1971
// Exercises Heap::GetMaximumFillToAlign and Heap::GetFillToAlign for every
// supported AllocationAlignment, probing addresses at successive pointer
// offsets from a NULL base.
TEST(TestAlignmentCalculations) {
  // Maximum fill amounts are consistent.
  int maximum_double_misalignment = kDoubleSize - kPointerSize;
  int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
  int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
  CHECK_EQ(0, max_word_fill);
  int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, max_double_fill);
  int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
  int max_simd128_unaligned_fill =
      Heap::GetMaximumFillToAlign(kSimd128Unaligned);
  CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);

  // NULL is trivially aligned to everything, which makes the expected fill
  // for each offset easy to state.
  Address base = static_cast<Address>(NULL);
  int fill = 0;

  // Word alignment never requires fill.
  fill = Heap::GetFillToAlign(base, kWordAligned);
  CHECK_EQ(0, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
  CHECK_EQ(0, fill);

  // No fill is required when address is double aligned.
  fill = Heap::GetFillToAlign(base, kDoubleAligned);
  CHECK_EQ(0, fill);
  // Fill is required if address is not double aligned.
  fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  // kDoubleUnaligned has the opposite fill amounts.
  fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
  CHECK_EQ(0, fill);

  // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
  fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
  CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
  CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
  fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
  CHECK_EQ(kPointerSize, fill);
  fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
  CHECK_EQ(0, fill);
}
2017
2018
2019static HeapObject* NewSpaceAllocateAligned(int size,
2020 AllocationAlignment alignment) {
2021 Heap* heap = CcTest::heap();
2022 AllocationResult allocation =
2023 heap->new_space()->AllocateRawAligned(size, alignment);
2024 HeapObject* obj = NULL;
2025 allocation.To(&obj);
2026 heap->CreateFillerObjectAt(obj->address(), size);
2027 return obj;
2028}
2029
2030
2031// Get new space allocation into the desired alignment.
2032static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2033 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2034 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2035 if (fill) {
2036 NewSpaceAllocateAligned(fill + offset, kWordAligned);
2037 }
2038 return *top_addr;
2039}
2040
2041
// Checks that aligned new-space allocation places objects at the requested
// alignment and inserts a filler object of exactly the expected size in
// front of allocations that start at a misaligned top.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
  const intptr_t double_misalignment = kDoubleSize - kPointerSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject* obj;
  HeapObject* filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kPointerSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned.
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    CHECK_EQ(kPointerSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
  }

  // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
  // on platform.
  start = AlignNewSpace(kSimd128Unaligned, 0);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is no filler.
  CHECK_EQ(kPointerSize, *top_addr - start);
  start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is a filler object before the object.
  filler = HeapObject::FromAddress(start);
  CHECK(obj != filler && filler->IsFiller() &&
        filler->Size() == kSimd128Size - kPointerSize);
  CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);

  if (double_misalignment) {
    // Test the 2 other alignments possible on 32 bit platforms.
    start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == 2 * kPointerSize);
    CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
    start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
  }
}
2120
2121
2122static HeapObject* OldSpaceAllocateAligned(int size,
2123 AllocationAlignment alignment) {
2124 Heap* heap = CcTest::heap();
2125 AllocationResult allocation =
2126 heap->old_space()->AllocateRawAligned(size, alignment);
2127 HeapObject* obj = NULL;
2128 allocation.To(&obj);
2129 heap->CreateFillerObjectAt(obj->address(), size);
2130 return obj;
2131}
2132
2133
2134// Get old space allocation into the desired alignment.
2135static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2136 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2137 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2138 int allocation = fill + offset;
2139 if (allocation) {
2140 OldSpaceAllocateAligned(allocation, kWordAligned);
2141 }
2142 Address top = *top_addr;
2143 // Now force the remaining allocation onto the free list.
2144 CcTest::heap()->old_space()->EmptyAllocationInfo();
2145 return top;
2146}
2147
2148
2149// Test the case where allocation must be done from the free list, so filler
2150// may precede or follow the object.
2151TEST(TestAlignedOverAllocation) {
2152 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2153 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2154 Address start;
2155 HeapObject* obj;
2156 HeapObject* filler1;
2157 HeapObject* filler2;
2158 if (double_misalignment) {
2159 start = AlignOldSpace(kDoubleAligned, 0);
2160 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2161 // The object is aligned, and a filler object is created after.
2162 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2163 filler1 = HeapObject::FromAddress(start + kPointerSize);
2164 CHECK(obj != filler1 && filler1->IsFiller() &&
2165 filler1->Size() == kPointerSize);
2166 // Try the opposite alignment case.
2167 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2168 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2169 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2170 filler1 = HeapObject::FromAddress(start);
2171 CHECK(obj != filler1);
2172 CHECK(filler1->IsFiller());
2173 CHECK(filler1->Size() == kPointerSize);
2174 CHECK(obj != filler1 && filler1->IsFiller() &&
2175 filler1->Size() == kPointerSize);
2176
2177 // Similarly for kDoubleUnaligned.
2178 start = AlignOldSpace(kDoubleUnaligned, 0);
2179 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2180 // The object is aligned, and a filler object is created after.
2181 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2182 filler1 = HeapObject::FromAddress(start + kPointerSize);
2183 CHECK(obj != filler1 && filler1->IsFiller() &&
2184 filler1->Size() == kPointerSize);
2185 // Try the opposite alignment case.
2186 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2187 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2188 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2189 filler1 = HeapObject::FromAddress(start);
2190 CHECK(obj != filler1 && filler1->IsFiller() &&
2191 filler1->Size() == kPointerSize);
2192 }
2193
2194 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2195 // on platform.
2196 start = AlignOldSpace(kSimd128Unaligned, 0);
2197 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2198 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2199 // There is a filler object after the object.
2200 filler1 = HeapObject::FromAddress(start + kPointerSize);
2201 CHECK(obj != filler1 && filler1->IsFiller() &&
2202 filler1->Size() == kSimd128Size - kPointerSize);
2203 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2204 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2205 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2206 // There is a filler object before the object.
2207 filler1 = HeapObject::FromAddress(start);
2208 CHECK(obj != filler1 && filler1->IsFiller() &&
2209 filler1->Size() == kSimd128Size - kPointerSize);
2210
2211 if (double_misalignment) {
2212 // Test the 2 other alignments possible on 32 bit platforms.
2213 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2214 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2215 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2216 // There are filler objects before and after the object.
2217 filler1 = HeapObject::FromAddress(start);
2218 CHECK(obj != filler1 && filler1->IsFiller() &&
2219 filler1->Size() == 2 * kPointerSize);
2220 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2221 CHECK(obj != filler2 && filler2->IsFiller() &&
2222 filler2->Size() == kPointerSize);
2223 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2224 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2225 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2226 // There are filler objects before and after the object.
2227 filler1 = HeapObject::FromAddress(start);
2228 CHECK(obj != filler1 && filler1->IsFiller() &&
2229 filler1->Size() == kPointerSize);
2230 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2231 CHECK(obj != filler2 && filler2->IsFiller() &&
2232 filler2->Size() == 2 * kPointerSize);
2233 }
2234}
2235
2236
2237TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2238 CcTest::InitializeVM();
2239 HeapIterator iterator(CcTest::heap());
2240 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2241 intptr_t size_of_objects_2 = 0;
2242 for (HeapObject* obj = iterator.next();
2243 obj != NULL;
2244 obj = iterator.next()) {
2245 if (!obj->IsFreeSpace()) {
2246 size_of_objects_2 += obj->Size();
2247 }
2248 }
2249 // Delta must be within 5% of the larger result.
2250 // TODO(gc): Tighten this up by distinguishing between byte
2251 // arrays that are real and those that merely mark free space
2252 // on the heap.
2253 if (size_of_objects_1 > size_of_objects_2) {
2254 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2255 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2256 "Iterator: %" V8_PTR_PREFIX "d, "
2257 "delta: %" V8_PTR_PREFIX "d\n",
2258 size_of_objects_1, size_of_objects_2, delta);
2259 CHECK_GT(size_of_objects_1 / 20, delta);
2260 } else {
2261 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2262 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2263 "Iterator: %" V8_PTR_PREFIX "d, "
2264 "delta: %" V8_PTR_PREFIX "d\n",
2265 size_of_objects_1, size_of_objects_2, delta);
2266 CHECK_GT(size_of_objects_2 / 20, delta);
2267 }
2268}
2269
2270
2271static void FillUpNewSpace(NewSpace* new_space) {
2272 // Fill up new space to the point that it is completely full. Make sure
2273 // that the scavenger does not undo the filling.
2274 Heap* heap = new_space->heap();
2275 Isolate* isolate = heap->isolate();
2276 Factory* factory = isolate->factory();
2277 HandleScope scope(isolate);
2278 AlwaysAllocateScope always_allocate(isolate);
2279 intptr_t available = new_space->Capacity() - new_space->Size();
2280 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2281 for (intptr_t i = 0; i < number_of_fillers; i++) {
2282 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2283 }
2284}
2285
2286
// Checks that the new space doubles in capacity when explicitly grown,
// does not shrink while it still holds objects, and halves in capacity
// when shrunk after a scavenge has emptied it.
TEST(GrowAndShrinkNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  // Explicitly growing should double the space capacity.
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);

  // Filling up the space keeps the capacity unchanged.
  old_capacity = new_space->TotalCapacity();
  FillUpNewSpace(new_space);
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Explicitly shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Let the scavenger empty the new space.
  heap->CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}
2336
2337
// Checks that CollectAllAvailableGarbage shrinks the new space back to its
// original capacity after the space has been explicitly grown and filled.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  // Grow and fill the space, then let the aggressive GC shrink it again.
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);
  FillUpNewSpace(new_space);
  heap->CollectAllAvailableGarbage();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}
2361
2362
2363static int NumberOfGlobalObjects() {
2364 int count = 0;
2365 HeapIterator iterator(CcTest::heap());
2366 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2367 if (obj->IsJSGlobalObject()) count++;
2368 }
2369 return count;
2370}
2371
2372
2373// Test that we don't embed maps from foreign contexts into
2374// optimized code.
2375TEST(LeakNativeContextViaMap) {
2376 i::FLAG_allow_natives_syntax = true;
2377 v8::Isolate* isolate = CcTest::isolate();
2378 v8::HandleScope outer_scope(isolate);
2379 v8::Persistent<v8::Context> ctx1p;
2380 v8::Persistent<v8::Context> ctx2p;
2381 {
2382 v8::HandleScope scope(isolate);
2383 ctx1p.Reset(isolate, v8::Context::New(isolate));
2384 ctx2p.Reset(isolate, v8::Context::New(isolate));
2385 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2386 }
2387
2388 CcTest::heap()->CollectAllAvailableGarbage();
2389 CHECK_EQ(2, NumberOfGlobalObjects());
2390
2391 {
2392 v8::HandleScope inner_scope(isolate);
2393 CompileRun("var v = {x: 42}");
2394 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2395 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2396 v8::Local<v8::Value> v =
2397 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2398 ctx2->Enter();
2399 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2400 v8::Local<v8::Value> res = CompileRun(
2401 "function f() { return o.x; }"
2402 "for (var i = 0; i < 10; ++i) f();"
2403 "%OptimizeFunctionOnNextCall(f);"
2404 "f();");
2405 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2406 CHECK(ctx2->Global()
2407 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2408 .FromJust());
2409 ctx2->Exit();
2410 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2411 ctx1p.Reset();
2412 isolate->ContextDisposedNotification();
2413 }
2414 CcTest::heap()->CollectAllAvailableGarbage();
2415 CHECK_EQ(1, NumberOfGlobalObjects());
2416 ctx2p.Reset();
2417 CcTest::heap()->CollectAllAvailableGarbage();
2418 CHECK_EQ(0, NumberOfGlobalObjects());
2419}
2420
2421
// Test that we don't embed functions from foreign contexts into
// optimized code: after ctx1 is disposed, only ctx2's global object may
// remain alive.
TEST(LeakNativeContextViaFunction) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both native contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    // Optimize a function in ctx2 that calls the ctx1 function.
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Drop the only explicit reference from ctx2 to the ctx1 function.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // Disposing ctx1 must free its global object despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2469
2470
// Test that we don't embed maps from foreign contexts into optimized code
// via keyed element loads: after ctx1 is disposed, only ctx2's global
// object may remain alive.
TEST(LeakNativeContextViaMapKeyed) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both native contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    // Optimize a function in ctx2 that keyed-loads from the ctx1 array.
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Drop the only explicit reference from ctx2 to the ctx1 array.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // Disposing ctx1 must free its global object despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2516
2517
// Test that we don't embed maps from foreign contexts into optimized code
// via a prototype-chain assignment: after ctx1 is disposed, only ctx2's
// global object may remain alive.
TEST(LeakNativeContextViaMapProto) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both native contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    // Optimize a function in ctx2 that installs the ctx1 object as a
    // prototype of a local object.
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Drop the only explicit reference from ctx2 to the ctx1 object.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // Disposing ctx1 must free its global object despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2567
2568
// Runs an optimized instanceof check while incremental marking is in
// progress and then forces a full GC; with FLAG_verify_heap enabled this
// catches a missing write barrier in the InstanceOfStub.
TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  if (i::FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  // Step the marker until f's code object has been marked black.
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }

  CHECK(marking->IsMarking());

  {
    // Invoke the stub (via g -> f) while marking is still in progress.
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  // Finish marking and collect; heap verification runs during this GC.
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::heap()->CollectGarbage(OLD_SPACE);
}
2623
2624
// Checks that a context-disposed notification delivered while incremental
// marking is active resets an optimized function's SharedFunctionInfo
// counters: ic_age catches up to the global IC age, and opt_count and
// profiler_ticks drop back to zero.
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++) s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
  CHECK(f->IsOptimized());

  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();
  // The following calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2665
2666
// Same as the incremental-marking variant above, but exercises the reset of
// SharedFunctionInfo counters during a plain (non-incremental) full GC.
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  // The test asserts on optimized code; skip if Crankshaft is unavailable.
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    // Warm up f twice, then force optimization so f->IsOptimized() below.
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++) s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  // Make sure no incremental cycle is in flight; the counters must be reset
  // by the atomic mark-sweep below.
  CcTest::heap()->incremental_marking()->Stop();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  CcTest::heap()->CollectAllGarbage();

  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2707
2708
// Checks that the heap's current GC flags are installed for the duration of
// a collection and reset to kNoGCFlags afterwards, and that new-space
// scavenges do not clobber flags set by an ongoing incremental marking.
HEAP_TEST(GCFlags) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Set the flags to check whether they are appropriately reset after the GC.
  heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Finish any leftover sweeping so the incremental cycle below starts from
  // a quiescent state.
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  // A full GC that aborts incremental marking resets the flags again.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2738
2739
// Checks that an idle notification finalizes an almost-complete incremental
// marking cycle, resulting in exactly one GC.
TEST(IdleNotificationFinishMarking) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  CHECK_EQ(CcTest::heap()->gc_count(), 0);

  // TODO(hpayer): We cannot write proper unit test right now for heap.
  // The ideal test would call kMaxIdleMarkingDelayCounter to test the
  // marking delay counter.

  // Perform a huge incremental marking step but don't complete marking.
  intptr_t bytes_processed = 0;
  do {
    bytes_processed =
        marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                      IncrementalMarking::FORCE_MARKING,
                      IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
  } while (bytes_processed);

  // The next invocations of incremental marking are not going to complete
  // marking since the completion threshold is not reached.
  for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
       i++) {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  IncrementalMarking::FORCE_MARKING,
                  IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
  }

  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  // Finalizing marking triggers exactly one full GC.
  CHECK_EQ(CcTest::heap()->gc_count(), 1);
}
2785
2786
// Test that HAllocateObject will always return an object in new-space, even
// when new space is full at the time the optimized allocation runs.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Fill new space so the optimized allocation below would normally fail;
  // AlwaysAllocateScope keeps allocation from falling back to a GC.
  SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");

  // Sanity-check the constructed object before asserting on its location.
  CHECK_EQ(4, res.As<v8::Object>()
                  ->GetRealNamedProperty(ctx, v8_str("x"))
                  .ToLocalChecked()
                  ->Int32Value(ctx)
                  .FromJust());

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InNewSpace(*o));
}
2820
2821
// Checks that allocation folding in optimized code still respects the
// pretenuring decision: the folded literal and both nested arrays (and their
// backing stores) must end up in old space.
TEST(OptimizedPretenuringAllocationFolding) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Allocate enough literals to trip the pretenuring heuristic, then force a
  // gc() so the allocation site is re-tenured before the optimized run.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array();"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}], [1.1]];"
      "  }"
      "  return elements[number_elements-1]"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2871
2872
// Checks that array literals containing object literals are pretenured by
// optimized code: both the array and its elements store must be in old space.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Allocate enough literals to trip the pretenuring heuristic before the
  // optimized run (see OptimizedPretenuringAllocationFolding).
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [{}, {}, {}];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2911
2912
// Checks pretenuring of object literals with mixed in-object properties
// (a heap object and a double): the outer and inner objects, and their
// non-unboxed property values, must be allocated in old space.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


  // Allocate enough literals to trip the pretenuring heuristic before the
  // optimized run.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  // idx1/idx2 address the first two in-object properties ('a' and 'b').
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
  // With unboxed doubles 'b' lives inline as a raw double; otherwise it is a
  // HeapNumber that must itself be in old space.
  if (!o->IsUnboxedDoubleField(idx2)) {
    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
  } else {
    CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
  }

  // NOTE(review): idx1/idx2 were computed from the outer object's map but are
  // reused for the inner object ({c: 2.2, d: {}}); this assumes both maps lay
  // out their first two in-object properties identically — TODO confirm.
  JSObject* inner_object =
      reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
  CHECK(CcTest::heap()->InOldSpace(inner_object));
  if (!inner_object->IsUnboxedDoubleField(idx1)) {
    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
  } else {
    CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
  }
  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}
2969
2970
2971TEST(OptimizedPretenuringDoubleArrayProperties) {
2972 i::FLAG_allow_natives_syntax = true;
2973 i::FLAG_expose_gc = true;
2974 CcTest::InitializeVM();
2975 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2976 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2977 v8::HandleScope scope(CcTest::isolate());
2978
2979 // Grow new space unitl maximum capacity reached.
2980 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2981 CcTest::heap()->new_space()->Grow();
2982 }
2983
2984 i::ScopedVector<char> source(1024);
2985 i::SNPrintF(
2986 source,
2987 "var number_elements = %d;"
2988 "var elements = new Array(number_elements);"
2989 "function f() {"
2990 " for (var i = 0; i < number_elements; i++) {"
2991 " elements[i] = {a: 1.1, b: 2.2};"
2992 " }"
2993 " return elements[i - 1];"
2994 "};"
2995 "f(); gc();"
2996 "f(); f();"
2997 "%%OptimizeFunctionOnNextCall(f);"
2998 "f();",
2999 AllocationSite::kPretenureMinimumCreated);
3000
3001 v8::Local<v8::Value> res = CompileRun(source.start());
3002
3003 i::Handle<JSObject> o = Handle<JSObject>::cast(
3004 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3005
3006 CHECK(CcTest::heap()->InOldSpace(*o));
3007 CHECK(CcTest::heap()->InOldSpace(o->properties()));
3008}
3009
3010
// Checks pretenuring of plain double-array literals: the array and its
// FixedDoubleArray backing store must be allocated in old space.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Allocate enough literals to trip the pretenuring heuristic before the
  // optimized run.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [1.1, 2.2, 3.3];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
3049
3050
// Checks pretenuring of nested array literals mixing object arrays and
// double arrays: the outer array, both nested arrays, and their backing
// stores must all be in old space.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // NOTE(review): unlike the sibling tests this hard-codes 100 instead of
  // interpolating AllocationSite::kPretenureMinimumCreated — presumably the
  // same value; verify if the constant ever changes.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = 100;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
3099
3100
// Checks pretenuring of nested object-array literals: the outer array and
// both nested object arrays (plus backing stores) must be in old space.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Allocate enough literals to trip the pretenuring heuristic before the
  // optimized run.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
  v8::Local<v8::Value> int_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
}
3150
3151
// Checks pretenuring of nested double-array literals: the outer array and
// both nested double arrays (plus backing stores) must be in old space.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Allocate enough literals to trip the pretenuring heuristic before the
  // optimized run.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
}
3201
3202
// Test regular array literals allocation: without pretenuring pressure the
// optimized literal's backing store stays in new space.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Storing 3.14 into a Smi array forces a transition to double elements,
  // which allocates a fresh backing store in the optimized code.
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = new Array(1, 2, 3);"
      "  numbers[0] = 3.14;"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
                                       ->Get(ctx, v8_str("0"))
                                       .ToLocalChecked()
                                       ->Int32Value(ctx)
                                       .FromJust());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InNewSpace(o->elements()));
}
3231
3232
3233static int CountMapTransitions(Map* map) {
3234 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3235}
3236
3237
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  // Disable map retention so unused transition targets die in the next GC.
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  static const int transitions_count = 256;

  CompileRun("function F() {}");
  {
    // Create one transition per distinct property name on F's initial map.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  i::Handle<JSReceiver> root =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
3279
3280
3281#ifdef DEBUG
3282static void AddTransitions(int transitions_count) {
3283 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3284 for (int i = 0; i < transitions_count; i++) {
3285 EmbeddedVector<char, 64> buffer;
3286 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3287 CompileRun(buffer.start());
3288 }
3289}
3290
3291
3292static i::Handle<JSObject> GetByName(const char* name) {
3293 return i::Handle<JSObject>::cast(
3294 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3295 CcTest::global()
3296 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
3297 .ToLocalChecked())));
3298}
3299
3300
3301static void AddPropertyTo(
3302 int gc_count, Handle<JSObject> object, const char* property_name) {
3303 Isolate* isolate = CcTest::i_isolate();
3304 Factory* factory = isolate->factory();
3305 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3306 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3307 i::FLAG_gc_interval = gc_count;
3308 i::FLAG_gc_global = true;
3309 i::FLAG_retain_maps_for_n_gc = 0;
3310 CcTest::heap()->set_allocation_timeout(gc_count);
3311 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
3312}
3313
3314
// Checks that a full transition array shrinks when a GC strikes during an
// allocation: after all original transition targets become unreachable, only
// the freshly added transition survives.
TEST(TransitionArrayShrinksDuringAllocToZero) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() { }");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Get rid of o (the holder of the last transition target), so none of the
  // original transition targets stays reachable.
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  // Triggers a GC while adding the property (see AddPropertyTo).
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Count number of live transitions after the GC. Only the newly added
  // 'funny' transition is left — root holds an instance of its target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3343
3344
// Like TransitionArrayShrinksDuringAllocToZero, but 'o' keeps one original
// transition target alive, so two transitions survive the GC.
TEST(TransitionArrayShrinksDuringAllocToOne) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  // Triggers a GC while adding the property (see AddPropertyTo).
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Count number of live transitions after the GC. Two transitions are left:
  // 'o' still holds an instance of one original transition target, and the
  // freshly added 'funny' transition is kept alive by root.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(2, transitions_after);
}
3370
3371
// Variant where the property added during GC pressure ('prop9') already has
// a transition: the array shrinks to that single transition.
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  // gc_count of 0 forces GC immediately during the property addition.
  AddPropertyTo(0, root, "prop9");
  CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);

  // Count number of live transitions after the GC. One transition is left —
  // the 'prop9' transition whose target root now instantiates.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3397
3398
// Checks that a simple (single-entry) transition is correctly converted to a
// full transition array when a second transition is added under GC pressure.
TEST(TransitionArraySimpleToFull) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 1;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Drop the original transition target so only root's map stays reachable.
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  // With a single transition the map stores it in the simple encoding.
  CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
  // Triggers a GC while adding the property (see AddPropertyTo).
  AddPropertyTo(2, root, "happy");

  // Count number of live transitions after the GC. One transition is left —
  // the freshly added 'happy' transition kept alive by root.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3426#endif // DEBUG
3427
3428
// Regression test for crbug/v8 issue 2143: a StoreIC-performed map
// transition during incremental marking must re-grey the transitioned
// object, or its new map can be swept while still in use.
TEST(Regress2143a) {
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking(CcTest::heap());

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
3466
3467
// Companion to Regress2143a: the same map transition performed from
// Crankshaft-optimized code (LStoreNamedField) instead of a StoreIC.
TEST(Regress2143b) {
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking(CcTest::heap());

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(new Object);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
3509
3510
// Checks that mostly-empty old-space pages are released back to the OS:
// after filling 20 pages with mostly-garbage, successive full GCs must
// shrink the page count, and a last-resort GC must get back to one page.
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_space = heap->old_space();
  CHECK_EQ(1, old_space->CountTotalPages());
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    // Fill the current page, then allocate one tiny live array so each new
    // page carries almost no live data.
    SimulateFullSpace(old_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered for preparation");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 1");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 2");
  // Comparing against twice the remaining count asserts that at least half
  // of the test pages were released.
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  heap->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(1, old_space->CountTotalPages());
}
3559
// Number of forced GCs observed via MockUseCounterCallback below.
static int forced_gc_counter = 0;
3561
3562void MockUseCounterCallback(v8::Isolate* isolate,
3563 v8::Isolate::UseCounterFeature feature) {
3564 isolate->GetCurrentContext();
3565 if (feature == v8::Isolate::kForcedGC) {
3566 forced_gc_counter++;
3567 }
3568}
3569
3570
3571TEST(CountForcedGC) {
3572 i::FLAG_expose_gc = true;
3573 CcTest::InitializeVM();
3574 Isolate* isolate = CcTest::i_isolate();
3575 v8::HandleScope scope(CcTest::isolate());
3576
3577 isolate->SetUseCounterCallback(MockUseCounterCallback);
3578
3579 forced_gc_counter = 0;
3580 const char* source = "gc();";
3581 CompileRun(source);
3582 CHECK_GT(forced_gc_counter, 0);
3583}
3584
3585
3586#ifdef OBJECT_PRINT
// Smoke test (OBJECT_PRINT builds only): printing a SharedFunctionInfo to
// stdout must not crash.
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << std::endl;
}
3602#endif // OBJECT_PRINT
3603
3604
// Verifies that the weak cells a call IC records in the feedback vector
// survive incremental marking followed by a full GC while the recorded
// closures are still strongly reachable.
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  {
    CompileRun("function fun() {};");
    fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  {
    CompileRun("function fun() {};");
    fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains type feedback for the two closures.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
  FeedbackVectorHelper feedback_helper(feedback_vector);

  // Both call slots should hold weak cells pointing at the closures.
  int expected_slots = 2;
  CHECK_EQ(expected_slots, feedback_helper.slot_count());
  int slot1 = 0;
  int slot2 = 1;
  CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
  CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The closures are still alive, so the weak cells must not be cleared.
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
             ->cleared());
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
             ->cleared());
}
3648
3649
3650static Code* FindFirstIC(Code* code, Code::Kind kind) {
3651 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3652 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3653 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3654 RelocInfo* info = it.rinfo();
3655 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3656 if (target->is_inline_cache_stub() && target->kind() == kind) {
3657 return target;
3658 }
3659 }
3660 return NULL;
3661}
3662
3663
3664static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3665 InlineCacheState desired_state) {
3666 Handle<TypeFeedbackVector> vector =
3667 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3668 FeedbackVectorHelper helper(vector);
3669 FeedbackVectorSlot slot = helper.slot(slot_index);
3670 if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
3671 LoadICNexus nexus(vector, slot);
3672 CHECK(nexus.StateFromFeedback() == desired_state);
3673 } else {
3674 CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
3675 KeyedLoadICNexus nexus(vector, slot);
3676 CHECK(nexus.StateFromFeedback() == desired_state);
3677 }
3678}
3679
3680
3681static void CheckVectorICCleared(Handle<JSFunction> f, int slot_index) {
3682 Handle<TypeFeedbackVector> vector =
3683 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3684 FeedbackVectorSlot slot(slot_index);
3685 LoadICNexus nexus(vector, slot);
3686 CHECK(IC::IsCleared(&nexus));
3687}
3688
3689
// Verifies that the weak cell recorded for a monomorphic construct site
// survives incremental marking plus a full GC when the target function
// belongs to the same native context (and thus stays reachable).
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The constructor feedback must still be a weak cell after the GC.
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}
3712
3713
// Verifies that constructor feedback referencing a function from a
// disposed native context is reset to the uninitialized sentinel by
// incremental marking plus a full GC.
TEST(IncrementalMarkingClearsMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Create the target function in a separate (soon-to-be-dead) context.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; };");
    fun1 = env->Global()->Get(env.local(), v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains a monomorphic constructor for object
  // originating from a different native context.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun1); f(fun1);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));


  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The cross-context feedback must have been cleared to the sentinel.
  CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
           vector->Get(FeedbackVectorSlot(0)));
}
3750
3751
// Verifies that a monomorphic load IC whose feedback references the same
// native context stays monomorphic across incremental marking + full GC.
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // Feedback must survive the GC unchanged.
  CheckVectorIC(f, 0, MONOMORPHIC);
}
3772
3773
// Verifies that a monomorphic load IC whose feedback references a map from
// a disposed native context is cleared by incremental marking + full GC.
TEST(IncrementalMarkingClearsMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Create the receiver in a separate (soon-to-be-dead) context.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
}
3804
3805
// Verifies that a polymorphic load IC fed by objects from two different
// native contexts survives incremental marking + full GC as long as no
// context dispose notification is fired.
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Simulate marking without disposing either context (contrast with the
  // "Clears" variant, which fires ContextDisposedNotification here).
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3842
3843
// Verifies that a polymorphic load IC fed by objects from two different
// native contexts is cleared by incremental marking + full GC once a
// context dispose notification has been fired.
TEST(IncrementalMarkingClearsPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
}
3881
3882
3883class SourceResource : public v8::String::ExternalOneByteStringResource {
3884 public:
3885 explicit SourceResource(const char* data)
3886 : data_(data), length_(strlen(data)) { }
3887
3888 virtual void Dispose() {
3889 i::DeleteArray(data_);
3890 data_ = NULL;
3891 }
3892
3893 const char* data() const { return data_; }
3894
3895 size_t length() const { return length_; }
3896
3897 bool IsDisposed() { return data_ == NULL; }
3898
3899 private:
3900 const char* data_;
3901 size_t length_;
3902};
3903
3904
// Runs |source| (which stores an error object into "error") from an
// external string, then evaluates |accessor| (a read or write of
// error.stack) and checks that this releases the retained source data.
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  // The resource is owned here; V8 only disposes its character data.
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage();
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  // The stack-trace data captured by the script still references the
  // external source, so it must not have been disposed yet.
  CHECK(!resource->IsDisposed());

  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
3936
3937
// Checks that the data captured for Error.stack (which retains the script
// source) is released after the stack property is accessed, covering
// normal errors, stack overflows, and errors reached via the prototype
// chain, for both the getter and the setter.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (i::FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null; "
    /* Normal Error */           "try { "
                                 " throw new Error(); "
                                 "} catch (e) { "
                                 " error = e; "
                                 "} ";
    static const char* source2 = "var error = null; "
    /* Stack overflow */         "try { "
                                 " (function f() { f(); })(); "
                                 "} catch (e) { "
                                 " error = e; "
                                 "} ";
    static const char* source3 = "var error = null; "
    /* Normal Error */           "try { "
    /* as prototype */           " throw new Error(); "
                                 "} catch (e) { "
                                 " error = {}; "
                                 " error.__proto__ = e; "
                                 "} ";
    static const char* source4 = "var error = null; "
    /* Stack overflow */         "try { "
    /* as prototype */           " (function f() { f(); })(); "
                                 "} catch (e) { "
                                 " error = {}; "
                                 " error.__proto__ = e; "
                                 "} ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
3995
3996
// Regression test for bug 159140: code flushing must cope with optimized
// code that is kept alive only through a handle while its function is
// otherwise eligible for flushing.
TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               " return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    CHECK(g->is_compiled());
    // Age g's code past the flushing threshold so it becomes a candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Keep f's optimized code alive through an escaped handle.
    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage();

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}
4055
4056
// Regression test for bug 165495: flushing unoptimized code that is still
// cached in the optimized code map must not break later closures that are
// instantiated from that map.
TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare an optimized closure that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function mkClosure() {"
               " return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age the unoptimized code past the flushing threshold.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking(heap);
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}
4101
4102
// Regression test for bug 169209: replacing a code-flushing candidate's
// unoptimized code during optimization must correctly unlink it from the
// candidate list (tracked via the code's gc_metadata field).
TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age f's code so it becomes a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(env.local(), v8_str("flushMe"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking(heap);
  // Non-NULL gc_metadata indicates the code is on the candidate list.
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
#ifdef DEBUG
  FLAG_stop_at = "f";
#endif
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage();
  // The candidate link must have been cleaned up after code replacement.
  CHECK(shared1->code()->gc_metadata() == NULL);
}
4176
4177
// Regression test for bug 169928: running an array-literal transition stub
// right at the new-space boundary must not misread the filler placed where
// an AllocationMemento would have been.  The test hand-crafts the new-space
// layout so the memento slot behind the array holds a filler object.
TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             " literal[0] = value;"
             " return literal;"
             "}"
             "function get_standard_literal() {"
             " var literal = [1, 2, 3];"
             " return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space until only the array, its memento slot and one word
  // of slack would still fit.
  AllocateAllButNBytes(CcTest::heap()->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject* obj = NULL;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kPointerSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(
      addr_obj, AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
4254
4255
4256#ifdef DEBUG
// Regression test for bug 513507 (DEBUG builds only): expanding the
// optimized code map must survive a GC triggered mid-expansion via an
// artificial allocation timeout.
TEST(Regress513507) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_gc_global = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Prepare function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Optimization may be disabled in this configuration; nothing to test.
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the new code several times to the optimized code map and also set an
  // allocation timeout so that expanding the code map will trigger a GC.
  heap->set_allocation_timeout(5);
  FLAG_gc_interval = 1000;
  for (int i = 0; i < 10; ++i) {
    // Distinct bailout ids force new entries (and thus map expansion).
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
}
4309#endif // DEBUG
4310
4311
// Regression test for bug 514122: old copies of the optimized code map left
// behind by growth must not be visited after evacuating a literals array
// that one of the entries references.
TEST(Regress514122) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Optimization may be disabled in this configuration; nothing to test.
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the code several times to the optimized code map.
  for (int i = 0; i < 3; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  shared->optimized_code_map()->Print();

  // Add the code with a literals array to be evacuated.
  Page* evac_page;
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure literal is placed on an old-space evacuation candidate.
    SimulateFullSpace(heap->old_space());

    // Make sure the number of literals is > 0.
    // Note: this |lit| intentionally shadows the outer one; only this copy
    // lives on the evacuation candidate page.
    Handle<LiteralsArray> lit =
        LiteralsArray::New(isolate, vector, 23, TENURED);

    evac_page = Page::FromAddress(lit->address());
    BailoutId id = BailoutId(100);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Heap is ready, force {lit_page} to become an evacuation candidate and
  // simulate incremental marking to enqueue optimized code map.
  FLAG_manual_evacuation_candidates_selection = true;
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  SimulateIncrementalMarking(heap);

  // No matter whether reachable or not, {boomer} is doomed.
  Handle<Object> boomer(shared->optimized_code_map(), isolate);

  // Add the code several times to the optimized code map. This will leave old
  // copies of the optimized code map unreachable but still marked.
  for (int i = 3; i < 6; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Trigger a GC to flush out the bug.
  heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
  boomer->Print();
}
4403
4404
// Tests that slots in a SharedFunctionInfo's optimized code map freed by a
// dead context are re-used for a later context instead of growing the map.
TEST(OptimizedCodeMapReuseEntries) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
  // run this test.
  if (i::FLAG_turbo) return;
  CcTest::InitializeVM();
  v8::Isolate* v8_isolate = CcTest::isolate();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create 3 contexts, allow the 2nd one to be disposed, and verify that
  // a 4th context will re-use the weak slots in the optimized code map
  // to hold data, rather than expanding the map.
  v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
  const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
  v8::ScriptCompiler::Source script_source(
      v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
          .ToLocalChecked());
  // Compile the script context-independently so the very same
  // SharedFunctionInfo can be bound into each of the contexts below.
  v8::Local<v8::UnboundScript> indep =
      v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
          .ToLocalChecked();
  const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  c1->Enter();
  indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
  CompileRun(toplevel);

  Handle<SharedFunctionInfo> shared;
  Handle<JSFunction> foo = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
  CHECK(foo->shared()->is_compiled());
  shared = handle(foo->shared());
  c1->Exit();

  {
    HandleScope scope(isolate);
    v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
    c2->Enter();
    indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
    CompileRun(toplevel);
    c2->Exit();
  }

  {
    HandleScope scope(isolate);
    v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
    c3->Enter();
    indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
    CompileRun(toplevel);
    c3->Exit();

    // Now, collect garbage. Context c2 should have no roots to it, and its
    // entry in the optimized code map should be free for a new context.
    for (int i = 0; i < 4; i++) {
      heap->CollectAllGarbage();
    }

    Handle<FixedArray> optimized_code_map =
        handle(shared->optimized_code_map());
    // There should be 3 entries in the map.
    CHECK_EQ(
        3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
            SharedFunctionInfo::kEntryLength));
    // But one of them (formerly for c2) should be cleared.
    int cleared_count = 0;
    for (int i = SharedFunctionInfo::kEntriesStart;
         i < optimized_code_map->length();
         i += SharedFunctionInfo::kEntryLength) {
      cleared_count +=
          WeakCell::cast(
              optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
              ->cleared()
          ? 1
          : 0;
    }
    CHECK_EQ(1, cleared_count);

    // Verify that a new context uses the cleared entry rather than creating
    // a new optimized code map array.
    v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
    c4->Enter();
    indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
    CompileRun(toplevel);
    c4->Exit();
    // Same backing array object => the cleared slot was recycled.
    CHECK_EQ(*optimized_code_map, shared->optimized_code_map());

    // Now each entry is in use.
    cleared_count = 0;
    for (int i = SharedFunctionInfo::kEntriesStart;
         i < optimized_code_map->length();
         i += SharedFunctionInfo::kEntryLength) {
      cleared_count +=
          WeakCell::cast(
              optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
              ->cleared()
          ? 1
          : 0;
    }
    CHECK_EQ(0, cleared_count);
  }
}
4512
4513
// Regression test: optimized code reachable only through the optimized code
// map must not be re-installed after the unoptimized code of an inlined
// callee has been flushed (the final CompileRun would crash otherwise).
TEST(Regress513496) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare an optimized closure containing an inlined function. Then age
  // the inlined unoptimized code to trigger code flushing but make sure the
  // outer optimized code is kept in the optimized code map.
  Handle<SharedFunctionInfo> shared;
  {
    LocalContext context;
    HandleScope inner_scope(isolate);
    CompileRun(
        "function g(x) { return x + 1 }"
        "function mkClosure() {"
        "  return function(x) { return g(x); };"
        "}"
        "var f = mkClosure();"
        "f(1); f(2);"
        "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> g = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("g"))
                                           .ToLocalChecked())));
    CHECK(g->shared()->is_compiled());
    // Age g's unoptimized code past the flushing threshold so the next full
    // GC is allowed to flush it.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("f"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    // Drop the closure itself; only |shared| keeps the function info alive.
    CompileRun("f = null");
  }

  // Lookup the optimized code and keep it alive.
  CodeAndLiterals result = shared->SearchOptimizedCodeMap(
      isolate->context()->native_context(), BailoutId::None());
  Handle<Code> optimized_code(result.code, isolate);

  // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
  // though the optimized code for 'f' is reachable via the optimized code map.
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var h = mkClosure(); h('bozo');");
}
4573
4574
// Tests that slots inside a large object pointing to an evacuation candidate
// are recorded by the write barrier and updated when the candidate moves.
TEST(LargeObjectSlotRecording) {
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
  Page* evac_page = Page::FromAddress(lit->address());
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  FixedArray* old_location = *lit;

  // Allocate a large object (guaranteed to land in large-object space).
  int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
  CHECK(size > Page::kMaxRegularHeapObjectSize);
  Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to activate the write barrier.
  SimulateIncrementalMarking(heap, false);
  heap->incremental_marking()->AdvanceIncrementalMarking(
      10000000, 10000000, IncrementalMarking::IdleStepActions());

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  // Move the evacuation candidate object.
  CcTest::heap()->CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
4617
4618
// ObjectVisitor that ignores every pointer it is shown; used below to drive
// handle-scope iteration without touching the visited objects.
class DummyVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {}
};
4623
4624
// Tests that iterating the handle-scope implementer is safe while a
// DeferredHandleScope is open exactly at a handle-block boundary.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Fill the current handle block to capacity.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
4644
4645
// Tests that a single large incremental-marking step makes enough progress
// to (nearly) finish marking a heap dominated by one very large array.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking();
  }
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
4663
4664
// Tests that optimized code still runs correctly after inline allocation has
// been disabled and then re-enabled on the heap.
TEST(DisableInlineAllocation) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
4692
4693
4694static int AllocationSitesCount(Heap* heap) {
4695 int count = 0;
4696 for (Object* site = heap->allocation_sites_list();
4697 !(site->IsUndefined());
4698 site = AllocationSite::cast(site)->weak_next()) {
4699 count++;
4700 }
4701 return count;
4702}
4703
4704
// Tests that code registered in an AllocationSite's dependent_code() is held
// weakly: once the function dies, GC clears the weak cell even though the
// site itself is kept alive by a global handle.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_crankshaft()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Pin the site with a global handle so it outlives the context below.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    // Optimizing 'bar' must have registered its code on the site,
    // wrapped in a weak cell.
    CHECK_EQ(DependentCode::kAllocationSiteTransitionChangedGroup,
             site->dependent_code()->group());
    CHECK_EQ(1, site->dependent_code()->count());
    CHECK(site->dependent_code()->object_at(0)->IsWeakCell());
    Code* function_bar = Code::cast(
        WeakCell::cast(site->dependent_code()->object_at(0))->value());
    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));
    CHECK_EQ(bar_handle->code(), function_bar);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
        WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
}
4761
4762
// Tests that property cells embedded in optimized code do not keep the code
// alive: after its function dies, GC marks the code for deoptimization.
TEST(CellsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "bar = (function() {"
        "  function bar() {"
        "    return foo(1);"
        "  };"
        "  var foo = function(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  %OptimizeFunctionOnNextCall(bar);"
        "  bar(foo);"
        "  return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive; the function and context die with the scope.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
4806
4807
// Tests that heap objects embedded in optimized code are held weakly: after
// its function dies, GC marks the code for deoptimization.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        "  return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive; the function and context die with the scope.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
4849
4850
// Tests that repeated optimize-then-collect cycles under incremental marking
// do not leak entries in the heap's weak object-to-code table.
TEST(NoWeakHashTableLeakWithIncrementalMarking) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  if (!i::FLAG_incremental_marking) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_compilation_cache = false;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  // Do not run for no-snap builds.
  if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;

  v8::internal::Heap* heap = CcTest::heap();

  // Get a clean slate regarding optimized functions on the heap.
  i::Deoptimizer::DeoptimizeAll(isolate);
  heap->CollectAllGarbage();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  // Each iteration optimizes a fresh function in a fresh context, then lets
  // everything die; the table must not accumulate entries across iterations.
  for (int i = 0; i < 3; i++) {
    SimulateIncrementalMarking(heap);
    {
      LocalContext context;
      HandleScope scope(heap->isolate());
      EmbeddedVector<char, 256> source;
      SNPrintF(source,
               "function bar%d() {"
               "  return foo%d(1);"
               "};"
               "function foo%d(x) { with (x) { return 1 + x; } };"
               "bar%d();"
               "bar%d();"
               "bar%d();"
               "%%OptimizeFunctionOnNextCall(bar%d);"
               "bar%d();",
               i, i, i, i, i, i, i, i);
      CompileRun(source.start());
    }
    heap->CollectAllGarbage();
  }
  int elements = 0;
  if (heap->weak_object_to_code_table()->IsHashTable()) {
    WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
    elements = t->NumberOfElements();
  }
  CHECK_EQ(0, elements);
}
4900
4901
// Defines, warms up, and optimizes a trivial global function called |name|,
// then returns a handle to the resulting JSFunction.
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
                                                const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();", name, name, name, name, name);
  CompileRun(source.start());
  i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(isolate->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
  return fun;
}
4918
4919
4920static int GetCodeChainLength(Code* code) {
4921 int result = 0;
4922 while (code->next_code_link()->IsCode()) {
4923 result++;
4924 code = Code::cast(code->next_code_link());
4925 }
4926 return result;
4927}
4928
4929
// Tests that the next_code_link chain treats its entries weakly: dropping a
// function lets GC unlink its code from the chain.
TEST(NextCodeLinkIsWeak) {
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  heap->CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
    CompileRun("mortal = null; immortal = null;");
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
4959
4960
// Assembles a minimal push/drop stub and wraps it in a Code object flagged
// OPTIMIZED_FUNCTION, for use as a stand-in on optimized-code lists.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(1);
  masm.GetCode(&desc);
  Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
  CHECK(code->IsCode());
  return code;
}
4975
4976
// Tests weakness of next_code_link on the context's OPTIMIZED_CODE_LIST using
// hand-built dummy code objects instead of JS functions.
TEST(NextCodeLinkIsWeak2) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  heap->CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    // Chain: immortal -> mortal -> old list head; only |immortal| escapes.
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
5002
5003
// Set to true by ClearWeakIC when the GC runs the weak callback below.
static bool weak_ic_cleared = false;

// Weak-handle callback: records that the target was collected and resets the
// persistent handle (required by the second-pass callback contract).
static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}
5012
5013
// Tests that the constructor recorded in a CallIC-style feedback slot is held
// via a weak cell, and that the slot can go monomorphic again afterwards.
TEST(WeakFunctionInConstructor) {
  if (i::FLAG_always_opt) return;
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      "  return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    // 'hat' is only reachable through the returned value below.
    const char* source =
        " (function() {"
        "   function hat() { this.x = 5; }"
        "   createObj(hat);"
        "   createObj(hat);"
        "   return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had its weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
      createObj->shared()->feedback_vector(), CcTest::i_isolate());
  // Bounded retry loop: keep collecting until the slot's weak cell clears.
  for (int i = 0; i < 20; i++) {
    Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
    CHECK(slot_value->IsWeakCell());
    if (WeakCell::cast(slot_value)->cleared()) break;
    heap->CollectAllGarbage();
  }

  Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
}
5072
5073
// Checks that the value returned by execution of |source| is held weakly:
// it is stored in a weak persistent handle, and a full GC must invoke the
// ClearWeakIC callback (i.e. nothing else — in particular no IC — keeps the
// object alive).
void CheckWeakness(const char* source) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
5095
5096
5097// Each of the following "weak IC" tests creates an IC that embeds a map with
5098// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5099TEST(WeakMapInMonomorphicLoadIC) {
5100 CheckWeakness("function loadIC(obj) {"
5101 " return obj.name;"
5102 "}"
5103 " (function() {"
5104 " var proto = {'name' : 'weak'};"
5105 " var obj = Object.create(proto);"
5106 " loadIC(obj);"
5107 " loadIC(obj);"
5108 " loadIC(obj);"
5109 " return proto;"
5110 " })();");
5111}
5112
5113
// Polymorphic load IC (two receiver maps): neither embedded map may keep
// |proto| alive across GC.
TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   loadIC(poly);"
      "   return proto;"
      " })();");
}
5131
5132
// Monomorphic keyed-load IC: the embedded map must not keep |proto| alive
// across GC.
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                "  return obj[field];"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   return proto;"
                " })();");
}
5146
5147
// Polymorphic keyed-load IC (two receiver maps): neither embedded map may
// keep |proto| alive across GC.
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      "  return obj[field];"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedLoadIC(poly, 'name');"
      "   return proto;"
      " })();");
}
5165
5166
// Monomorphic store IC: the embedded map must not keep |proto| alive across
// GC.
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                "  obj.name = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   return proto;"
                " })();");
}
5180
5181
// Polymorphic store IC (two receiver maps): neither embedded map may keep
// |proto| alive across GC.
TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      "  obj.name = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   storeIC(poly, 'x');"
      "   return proto;"
      " })();");
}
5199
5200
// Monomorphic keyed-store IC: the embedded map must not keep |proto| alive
// across GC. (The JS deliberately calls keyedStoreIC with |value| omitted —
// storing undefined is sufficient to warm up the IC.)
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                "  obj[field] = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   return proto;"
                " })();");
}
5214
5215
// Polymorphic keyed-store IC (two receiver maps): neither embedded map may
// keep |proto| alive across GC.
TEST(WeakMapInPolymorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      "  obj[field] = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedStoreIC(poly, 'x');"
      "   return proto;"
      " })();");
}
5233
5234
// Monomorphic compare-nil IC: the embedded map must not keep |proto| alive
// across GC.
TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   return proto;"
                " })();");
}
5248
5249
5250Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5251 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5252 Handle<Object> obj =
5253 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5254 return Handle<JSFunction>::cast(obj);
5255}
5256
5257
5258void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5259 int slot_index, InlineCacheState state) {
5260 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5261 kind == Code::CALL_IC) {
5262 TypeFeedbackVector* vector = shared->feedback_vector();
5263 FeedbackVectorSlot slot(slot_index);
5264 if (kind == Code::LOAD_IC) {
5265 LoadICNexus nexus(vector, slot);
5266 CHECK_EQ(nexus.StateFromFeedback(), state);
5267 } else if (kind == Code::KEYED_LOAD_IC) {
5268 KeyedLoadICNexus nexus(vector, slot);
5269 CHECK_EQ(nexus.StateFromFeedback(), state);
5270 } else if (kind == Code::CALL_IC) {
5271 CallICNexus nexus(vector, slot);
5272 CHECK_EQ(nexus.StateFromFeedback(), state);
5273 }
5274 } else {
5275 Code* ic = FindFirstIC(code, kind);
5276 CHECK(ic->is_inline_cache_stub());
5277 CHECK(ic->ic_state() == state);
5278 }
5279}
5280
5281
// Tests that a monomorphic load IC keeps its MONOMORPHIC state across a full
// GC and across a re-run of the warm-up code.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
}
5313
5314
// Tests that a polymorphic load IC keeps its POLYMORPHIC state across a full
// GC and across a re-run of the warm-up code.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  var poly = Object.create(proto);"
      "  poly.x = true;"
      "  loadIC(poly);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
}
5349
5350
TEST(WeakCell) {
  // A WeakCell keeps its value only while the value is otherwise reachable:
  // scavenges must never clear a cell, and a full GC must clear exactly the
  // cells whose values have become unreachable.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  HandleScope outer_scope(isolate);
  Handle<WeakCell> weak_cell1;
  {
    // After this scope closes, the fixed array is reachable only through
    // weak_cell1 and is therefore garbage.
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
    weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
  }

  // survivor stays strongly held by the outer scope for the whole test.
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cell2;
  {
    HandleScope inner_scope(isolate);
    weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
  }
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // New-space GCs (scavenges) must not clear either cell.
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // Full GC: the dead value's cell is cleared, the live value's cell is not.
  heap->CollectAllAvailableGarbage();
  CHECK(weak_cell1->cleared());
  CHECK_EQ(*survivor, weak_cell2->value());
}
5383
5384
TEST(WeakCellsWithIncrementalMarking) {
  // Weak cells whose values die must still be cleared even when the values
  // were visited by the incremental marker while they were still reachable
  // from handles.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  const int N = 16;
  HandleScope outer_scope(isolate);
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cells[N];

  for (int i = 0; i < N; i++) {
    HandleScope inner_scope(isolate);
    // Cell 0 points at the strongly-held survivor; every other cell points
    // at an array that dies when the inner scope closes.
    Handle<HeapObject> value =
        i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
    Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
    CHECK(weak_cell->value()->IsFixedArray());
    IncrementalMarking* marking = heap->incremental_marking();
    if (marking->IsStopped()) {
      heap->StartIncrementalMarking();
    }
    // Interleave a marking step and a scavenge with the allocations so the
    // cells are created in various marking states.
    marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    heap->CollectGarbage(NEW_SPACE);
    CHECK(weak_cell->value()->IsFixedArray());
    weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
  }
  // Full GC: only the survivor-backed cell may keep its value.
  heap->CollectAllGarbage();
  CHECK_EQ(*survivor, weak_cells[0]->value());
  for (int i = 1; i < N; i++) {
    CHECK(weak_cells[i]->cleared());
  }
}
5417
5418
5419#ifdef DEBUG
TEST(AddInstructionChangesNewSpacePromotion) {
  // Regression test: run optimized code that mixes double and tagged stores
  // while allocation is forced to fail almost immediately
  // (set_allocation_timeout(1)), so a GC hits in the middle of `crash` and
  // must not corrupt the partially initialized object.
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  i::FLAG_stress_compaction = true;
  i::FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!i::FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      " return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      " var object = {a: null, b: null};"
      " var result = add(1.5, x | 0);"
      " object.a = result;"
      " oldSpaceObject = object;"
      " return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  heap->DisableInlineAllocation();
  // Force the next allocation attempt to trigger a GC.
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  heap->CollectAllGarbage();
}
5460
5461
// Fatal-error handler for CEntryStubOOM: terminate the process with exit
// code 0 exactly when the fatal error originated from CALL_AND_RETRY_LAST
// (i.e. a genuine out-of-memory retry failure), and nonzero otherwise.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // strcmp yields 0 on a match, so the process exit code doubles as the
  // pass/fail signal for the test harness.
  const int mismatch = strcmp(location, "CALL_AND_RETRY_LAST");
  exit(mismatch);
}
5466
5467
TEST(CEntryStubOOM) {
  // Drive the heap into OOM via --gc-interval=1 while running builtin array
  // code through the CEntry stub. The fatal-error handler above exits the
  // process with 0 when the OOM comes from CALL_AND_RETRY_LAST, which the
  // test harness treats as success.
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  // unshift returns the new length, so if we survive, the result is a number.
  CHECK(result->IsNumber());
}
5482
5483#endif // DEBUG
5484
5485
// No-op interrupt callback for Regress357137; requesting the interrupt is
// what matters, not what the callback does.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5487
5488
// JS-callable function (installed as "interrupt" in Regress357137) that
// schedules an interrupt on the current isolate.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
}
5492
5493
UNINITIALIZED_TEST(Regress538257) {
  // Regression test: on a tiny heap, mark every old-space page as a forced
  // evacuation candidate and then fill up old space. The subsequent GC must
  // abort compaction gracefully instead of running out of memory.
  i::FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size(1 * Page::kPageSize / MB);
  create_params.constraints.set_max_old_space_size(6 * Page::kPageSize / MB);
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = i_isolate->heap()->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Allocate tenured arrays until old space can no longer expand, tagging
    // every touched page as an evacuation candidate.
    for (int i = 0; (i < kMaxObjects) && old_space->CanExpand(Page::kPageSize);
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
      Page::FromAddress(objects[i]->address())
          ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
    }
    SimulateFullSpace(old_space);
    i_isolate->heap()->CollectGarbage(OLD_SPACE);
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
5524
5525
TEST(Regress357137) {
  // Regression test: request an interrupt right before calling an eval-built
  // closure with many context-allocated locals. The interrupt triggers a
  // fake stack overflow inside f; the closure must still work afterwards.
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose RequestInterrupt to script as interrupt().
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
5547
5548
TEST(Regress507979) {
  // Regression test: shrinking an object while a filtering heap iterator is
  // live creates a filler whose mark bits overlap the following live object;
  // iteration must still terminate and visit valid objects.
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope handle_scope(isolate);

  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(heap->InNewSpace(o1->address()));
  CHECK(heap->InNewSpace(o2->address()));

  HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(kFixedArrayLen - 1);

  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK(obj->address() != nullptr);
  }
}
5572
5573
TEST(ArrayShiftSweeping) {
  // Array.prototype.shift() left-trims the elements backing store in place.
  // Verify that after two full GCs (which may leave concurrent sweeping in
  // progress) the trimmed elements are in a consistent state: either the
  // page is (almost) done sweeping or the elements are marked black.
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  v8::Local<v8::Value> result = CompileRun(
      "var array = new Array(400);"
      "var tmp = new Array(1000);"  // Padding so `array` isn't page-aligned.
      "array[0] = 10;"
      "gc();"
      "gc();"  // Two GCs promote the arrays to old space.
      "array.shift();"
      "array;");

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)));
  CHECK(heap->InOldSpace(o->elements()));
  CHECK(heap->InOldSpace(*o));
  Page* page = Page::FromAddress(o->elements()->address());
  CHECK(page->parallel_sweeping_state().Value() <=
            MemoryChunk::kSweepingFinalize ||
        Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
}
5599
5600
UNINITIALIZED_TEST(PromotionQueue) {
  // Checks that the scavenger's promotion queue, which lives at the end of
  // to-space, is correctly evacuated when an allocation would otherwise
  // overwrite it. See the detailed scenario description below.
  i::FLAG_expose_gc = true;
  // Force a fixed two-page semi-space so the layout described below holds.
  i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
  i::FLAG_min_semi_space_size = i::FLAG_max_semi_space_size;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    NewSpace* new_space = heap->new_space();

    // In this test we will try to overwrite the promotion queue which is at the
    // end of to-space. To actually make that possible, we need at least two
    // semi-space pages and take advantage of fragmentation.
    // (1) Use a semi-space consisting of two pages.
    // (2) Create a few small long living objects and call the scavenger to
    // move them to the other semi-space.
    // (3) Create a huge object, i.e., remainder of first semi-space page and
    // create another huge object which should be of maximum allocatable memory
    // size of the second semi-space page.
    // (4) Call the scavenger again.
    // What will happen is: the scavenger will promote the objects created in
    // (2) and will create promotion queue entries at the end of the second
    // semi-space page during the next scavenge when it promotes the objects to
    // the old generation. The first allocation of (3) will fill up the first
    // semi-space page. The second allocation in (3) will not fit into the
    // first semi-space page, but it will overwrite the promotion queue which
    // are in the second semi-space page. If the right guards are in place, the
    // promotion queue will be evacuated in that case.


    CHECK(new_space->IsAtMaximumCapacity());
    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());

    // Call the scavenger two times to get an empty new space
    heap->CollectGarbage(NEW_SPACE);
    heap->CollectGarbage(NEW_SPACE);

    // First create a few objects which will survive a scavenge, and will get
    // promoted to the old generation later on. These objects will create
    // promotion queue entries at the end of the second semi-space page.
    const int number_handles = 12;
    Handle<FixedArray> handles[number_handles];
    for (int i = 0; i < number_handles; i++) {
      handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    }

    heap->CollectGarbage(NEW_SPACE);
    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());

    // Fill-up the first semi-space page.
    FillUpOnePage(new_space);

    // Create a small object to initialize the bump pointer on the second
    // semi-space page.
    Handle<FixedArray> small =
        i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    CHECK(heap->InNewSpace(*small));

    // Fill-up the second semi-space page.
    FillUpOnePage(new_space);

    // This scavenge will corrupt memory if the promotion queue is not
    // evacuated.
    heap->CollectGarbage(NEW_SPACE);
  }
  isolate->Dispose();
}
5673
5674
TEST(Regress388880) {
  // Regression test: migrate an object that sits exactly at the end of a
  // page while incremental marking is active. This used to crash in
  // Heap::AdjustLiveBytes() during JSObject::MigrateFastToFast().
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // map2 extends map1 with one field, so migrating map1 -> map2 grows the
  // object in place.
  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<Map> map2 =
      Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
                         HeapType::Any(isolate), NONE, Representation::Tagged(),
                         OMIT_TRANSITION).ToHandleChecked();

  int desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so, that object allocated
  // afterwards would end at the end of the page.
  SimulateFullSpace(heap->old_space());
  int padding_size = desired_offset - Page::kObjectStartOffset;
  CreatePadding(heap, padding_size, TENURED);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
  o->set_properties(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromAddress(o->address());
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
5717
5718
TEST(Regress3631) {
  // Regression test: the backing store of a WeakMap is replaced (grown)
  // after it has already been marked black by incremental marking. The
  // final GC must process the new backing store correctly.
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      " var key = {'k' : i + 0.1};"
      " weak_map.set(key, 1);"
      " future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking();
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
  // Step the marker until the table is black (or marking finishes).
  while (!Marking::IsBlack(
             Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
         !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      " weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  heap->CollectGarbage(OLD_SPACE);
}
5757
5758
TEST(Regress442710) {
  // Regression test: left-trim an array that is referenced from a global
  // property, then run a full GC over the trimmed backing store.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<JSArray> array = factory->NewJSArray(2);

  // Expose the array to script as "testArray" so shift() runs on it.
  Handle<String> name = factory->InternalizeUtf8String("testArray");
  JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  heap->CollectGarbage(OLD_SPACE);
}
5775
5776
HEAP_TEST(NumberStringCacheSize) {
  // Test that the number-string cache has not been resized in the snapshot.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // Only meaningful when booting from a snapshot.
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  // The cache stores (number, string) pairs, hence length == 2 * size.
  CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}
5786
5787
TEST(Regress3877) {
  // A WeakCell holding a function's prototype must stay alive while any map
  // still references the prototype, and must be cleared once the last such
  // map becomes collectable.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  Handle<WeakCell> weak_prototype;
  {
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
  }
  CHECK(!weak_prototype->cleared());
  // Drop the direct reference; a.x's map still points at the prototype.
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype->cleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  CHECK(weak_prototype->cleared());
}
5820
5821
// Creates a fresh map with a script-allocated prototype, registers it in the
// heap's retained-maps list, and returns the map's weak cell so callers can
// observe when the map is collected.
Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSReceiver> proto =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Map::SetPrototype(map, proto);
  heap->AddRetainedMap(map);
  // Escape only the weak cell; the map itself has no strong handle left.
  return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
}
5833
5834
// Verifies that an otherwise-unreachable retained map survives exactly n
// old-space GCs when --retain-maps-for-n-gc=n, and is collected on GC n+1.
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
  CHECK(!weak_cell->cleared());
  // The map must survive the first n GCs...
  for (int i = 0; i < n; i++) {
    SimulateIncrementalMarking(heap);
    heap->CollectGarbage(OLD_SPACE);
  }
  CHECK(!weak_cell->cleared());
  // ...and die on the next one.
  SimulateIncrementalMarking(heap);
  heap->CollectGarbage(OLD_SPACE);
  CHECK(weak_cell->cleared());
}
5850
5851
TEST(MapRetaining) {
  // Exercise map retention with the default flag value and a few explicit
  // retention counts, including the zero (no retention) edge case.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(1);
  CheckMapRetainingFor(7);
}
5860
5861
TEST(RegressArrayListGC) {
  // Regression test: adding to the retained-maps ArrayList must be safe even
  // when the addition itself triggers a global GC that can trim the list.
  FLAG_retain_maps_for_n_gc = 1;
  FLAG_incremental_marking = 0;
  FLAG_gc_global = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  AddRetainedMap(isolate, heap);
  Handle<Map> map = Map::Create(isolate, 1);
  heap->CollectGarbage(OLD_SPACE);
  // Force GC in old space on next addition of retained map.
  Map::WeakCellForMap(map);
  SimulateFullSpace(CcTest::heap()->new_space());
  for (int i = 0; i < 10; i++) {
    heap->AddRetainedMap(map);
  }
  heap->CollectGarbage(OLD_SPACE);
}
5881
5882
5883#ifdef DEBUG
TEST(PathTracer) {
  // Smoke test for the debug-only heap path tracer: tracing the retention
  // path to a live string must not crash.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> result = CompileRun("'abc'");
  Handle<Object> o = v8::Utils::OpenHandle(*result);
  CcTest::i_isolate()->heap()->TracePathToObject(*o);
}
5892#endif // DEBUG
5893
5894
5895TEST(WritableVsImmortalRoots) {
5896 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5897 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5898 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5899 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5900 // A root value can be writable, immortal, or neither, but not both.
5901 CHECK(!immortal || !writable);
5902 }
5903}
5904
5905
// Right-trims a fixed typed array of the given type/length by
// elements_to_trim elements and verifies the array header survives and any
// freed tail is covered by a filler object.
static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
                                         int initial_length,
                                         int elements_to_trim) {
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<FixedTypedArrayBase> array =
      factory->NewFixedTypedArray(initial_length, type, true);
  int old_size = array->size();
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
                                                         elements_to_trim);

  // Check that free space filler is at the right place and did not smash the
  // array header.
  CHECK(array->IsFixedArrayBase());
  CHECK_EQ(initial_length - elements_to_trim, array->length());
  int new_size = array->size();
  if (new_size != old_size) {
    // Free space filler should be created in this case.
    Address next_obj_address = array->address() + array->size();
    CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
  }
  // Leave the heap clean for the next invocation.
  heap->CollectAllAvailableGarbage();
}
5932
5933
TEST(Regress472513) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // The combination of type/initial_length/elements_to_trim triggered
  // typed array header smashing with free space filler (crbug/472513).

  // 64-bit cases.
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);

  // 32-bit cases.
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
}
5956
5957
TEST(WeakFixedArray) {
  // Smoke test of the WeakFixedArray API: Add to an empty (null) array,
  // Remove, Compact with the null callback, and Add again must not crash.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
  // Passing an empty handle makes Add allocate a fresh array.
  Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
  array->Remove(number);
  array->Compact<WeakFixedArray::NullCallback>();
  WeakFixedArray::Add(array, number);
}
5968
5969
TEST(PreprocessStackTrace) {
  // A raw stack trace holds Code objects. A GC must preprocess stored stack
  // traces, replacing every Code reference with a position (Smi) so the
  // trace does not keep code alive.
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      JSObject::GetProperty(exception, key).ToHandleChecked();
  // Before GC: element 3 of the raw trace is still a Code object.
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");

  // After GC: the same element has been replaced by a Smi position.
  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  // No Code object may remain anywhere in the preprocessed trace.
  Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
  int array_length = Smi::cast(stack_trace_array->length())->value();
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}
6001
6002
// Set by the weak callback below; observed by TEST(BootstrappingExports).
static bool utils_has_been_collected = false;

// Weak callback fired when the "utils" object is garbage-collected; records
// the collection and resets the persistent handle.
static void UtilsHasBeenCollected(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  utils_has_been_collected = true;
  data.GetParameter()->Reset();
}
6010
6011
TEST(BootstrappingExports) {
  // Expose utils object and delete it to observe that it is indeed
  // being garbage-collected.
  FLAG_expose_natives_as = "utils";
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;

  // With a snapshot the natives are not exposed this way; nothing to test.
  if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;

  utils_has_been_collected = false;

  v8::Persistent<v8::Object> utils;

  {
    // Grab a persistent to the exposed object, then delete the global
    // property so the persistent is its only remaining reference.
    v8::HandleScope scope(isolate);
    v8::Local<v8::String> name = v8_str("utils");
    utils.Reset(isolate, CcTest::global()
                             ->Get(env.local(), name)
                             .ToLocalChecked()
                             ->ToObject(env.local())
                             .ToLocalChecked());
    CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
  }

  // Make the persistent weak; collection should now fire the callback.
  utils.SetWeak(&utils, UtilsHasBeenCollected,
                v8::WeakCallbackType::kParameter);

  CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");

  CHECK(utils_has_been_collected);
}
6044
6045
TEST(Regress1878) {
  // Regression test: arrays created via the InternalArray constructor must
  // never share a map with (or leak their maps to) ordinary user arrays,
  // and must keep fast object elements.
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  // Expose the internal array constructor to script.
  v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
      CcTest::i_isolate()->internal_array_function());
  LocalContext env;
  CHECK(CcTest::global()
            ->Set(env.local(), v8_str("InternalArray"), constructor)
            .FromJust());

  v8::TryCatch try_catch(isolate);

  CompileRun(
      "var a = Array();"
      "for (var i = 0; i < 1000; i++) {"
      " var ai = new InternalArray(10000);"
      " if (%HaveSameMap(ai, a)) throw Error();"
      " if (!%HasFastObjectElements(ai)) throw Error();"
      "}"
      "for (var i = 0; i < 1000; i++) {"
      " var ai = new InternalArray(10000);"
      " if (%HaveSameMap(ai, a)) throw Error();"
      " if (!%HasFastObjectElements(ai)) throw Error();"
      "}");

  CHECK(!try_catch.HasCaught());
}
6075
6076
// Allocates a FixedArray of exactly `bytes` bytes in the given space
// (NEW_SPACE or old generation) and verifies placement and size.
// `bytes` must be pointer-aligned and at least one FixedArray header.
void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
  CHECK(bytes >= FixedArray::kHeaderSize);
  CHECK(bytes % kPointerSize == 0);
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  // AlwaysAllocateScope prevents a GC from being triggered by this
  // allocation, keeping the space's allocation counter deterministic.
  AlwaysAllocateScope always_allocate(isolate);
  int elements =
      static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
  Handle<FixedArray> array = factory->NewFixedArray(
      elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
  CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
  CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
}
6090
6091
TEST(NewSpaceAllocationCounter) {
  // The new-space allocation counter must advance by exactly the bytes
  // allocated, stay flat across a scavenge, and keep producing correct
  // deltas across unsigned wrap-around.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  // A scavenge must not change the counter.
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = -1;  // Largest size_t value.
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  // 20 * kSize crosses the wrap point; unsigned subtraction keeps deltas
  // correct throughout.
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}
6117
6118
TEST(OldSpaceAllocationCounter) {
  // Same idea as NewSpaceAllocationCounter, but for the old-generation
  // counter, which also advances on promotion -- hence CHECK_LE instead of
  // CHECK_EQ for the allocation deltas.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  // A scavenge with nothing to promote must not change the counter.
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = -1;  // Largest size_t value.
  heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}
6150
6151
TEST(NewSpaceAllocationThroughput) {
  // Feed the GC tracer synthetic (time, allocated-bytes) samples and check
  // that the reported throughput equals bytes-delta / time-delta over the
  // sampled window.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  // Throughput is computed over the whole recorded span (time1..time3).
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}
6173
6174
TEST(NewSpaceAllocationThroughput2) {
  // Like NewSpaceAllocationThroughput, but queries the overload that limits
  // the computation to the most recent 100ms-scoped window argument.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}
6196
6197
// JS-callable helper for TEST(MessageObjectLeak): asserts that no pending
// message object is left behind on the isolate after a catch/finally.
static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  // Read the pending message slot directly from the isolate's address.
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  // The hole marks "no pending message"; anything else is a leak.
  CHECK(message->IsTheHole());
}
6204
6205
TEST(MessageObjectLeak) {
  // After a caught exception or a finally-block that breaks out, the pending
  // message object must be cleared. Runs the same script twice: once
  // unoptimized, once through TurboFan (--turbo-filter=* --always-opt).
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose CheckLeak to script as check().
  global->Set(
      v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, CheckLeak));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);

  const char* test =
      "try {"
      " throw 'message 1';"
      "} catch (e) {"
      "}"
      "check();"
      "L: try {"
      " throw 'message 2';"
      "} finally {"
      " break L;"
      "}"
      "check();";
  CompileRun(test);

  // Re-run with TurboFan forced on for all functions.
  const char* flag = "--turbo-filter=*";
  FlagList::SetFlagsFromString(flag, StrLength(flag));
  FLAG_always_opt = true;
  FLAG_turbo_try_finally = true;

  CompileRun(test);
}
6239
6240
6241static void CheckEqualSharedFunctionInfos(
6242 const v8::FunctionCallbackInfo<v8::Value>& args) {
6243 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
6244 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
6245 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
6246 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
6247 CHECK(fun1->shared() == fun2->shared());
6248}
6249
6250
// Callback exposed to JS as remove(fun): resets the given function and its
// SharedFunctionInfo back to the lazy-compile builtin, then forces a full GC
// so previously compiled code for the function can be collected.
static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  // Both the JSFunction and its shared info must point back at CompileLazy,
  // otherwise the old code object stays reachable.
  fun->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
  isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
}
6259
6260
// Checks that recompiling a function after its code has been thrown away
// (via RemoveCodeAndGC) yields closures backed by the same canonical
// SharedFunctionInfo as before.
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // check(a, b): asserts a and b share one SharedFunctionInfo.
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  // remove(f): drops f's compiled code and runs a full GC.
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  // Inner function returned directly from the outer function.
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  // Same scenario with an extra level of closure nesting.
  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}
6286
6287
6288TEST(OldGenerationAllocationThroughput) {
6289 CcTest::InitializeVM();
6290 v8::HandleScope scope(CcTest::isolate());
6291 Isolate* isolate = CcTest::i_isolate();
6292 Heap* heap = isolate->heap();
6293 GCTracer* tracer = heap->tracer();
6294 int time1 = 100;
6295 size_t counter1 = 1000;
6296 tracer->SampleAllocation(time1, 0, counter1);
6297 int time2 = 200;
6298 size_t counter2 = 2000;
6299 tracer->SampleAllocation(time2, 0, counter2);
6300 size_t throughput =
6301 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6302 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6303 int time3 = 1000;
6304 size_t counter3 = 30000;
6305 tracer->SampleAllocation(time3, 0, counter3);
6306 throughput =
6307 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6308 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6309}
6310
6311
6312TEST(AllocationThroughput) {
6313 CcTest::InitializeVM();
6314 v8::HandleScope scope(CcTest::isolate());
6315 Isolate* isolate = CcTest::i_isolate();
6316 Heap* heap = isolate->heap();
6317 GCTracer* tracer = heap->tracer();
6318 int time1 = 100;
6319 size_t counter1 = 1000;
6320 tracer->SampleAllocation(time1, counter1, counter1);
6321 int time2 = 200;
6322 size_t counter2 = 2000;
6323 tracer->SampleAllocation(time2, counter2, counter2);
6324 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6325 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6326 int time3 = 1000;
6327 size_t counter3 = 30000;
6328 tracer->SampleAllocation(time3, counter3, counter3);
6329 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6330 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
6331}
6332
6333
6334TEST(ContextMeasure) {
6335 CcTest::InitializeVM();
6336 v8::HandleScope scope(CcTest::isolate());
6337 Isolate* isolate = CcTest::i_isolate();
6338 LocalContext context;
6339
6340 int size_upper_limit = 0;
6341 int count_upper_limit = 0;
6342 HeapIterator it(CcTest::heap());
6343 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6344 size_upper_limit += obj->Size();
6345 count_upper_limit++;
6346 }
6347
6348 ContextMeasure measure(*isolate->native_context());
6349
6350 PrintF("Context size : %d bytes\n", measure.Size());
6351 PrintF("Context object count: %d\n", measure.Count());
6352
6353 CHECK_LE(1000, measure.Count());
6354 CHECK_LE(50000, measure.Size());
6355
6356 CHECK_LE(measure.Count(), count_upper_limit);
6357 CHECK_LE(measure.Size(), size_upper_limit);
6358}
6359
6360
6361TEST(ScriptIterator) {
6362 CcTest::InitializeVM();
6363 v8::HandleScope scope(CcTest::isolate());
6364 Isolate* isolate = CcTest::i_isolate();
6365 Heap* heap = CcTest::heap();
6366 LocalContext context;
6367
6368 heap->CollectAllGarbage();
6369
6370 int script_count = 0;
6371 {
6372 HeapIterator it(heap);
6373 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6374 if (obj->IsScript()) script_count++;
6375 }
6376 }
6377
6378 {
6379 Script::Iterator iterator(isolate);
6380 while (iterator.Next()) script_count--;
6381 }
6382
6383 CHECK_EQ(0, script_count);
6384}
6385
6386
6387TEST(SharedFunctionInfoIterator) {
6388 CcTest::InitializeVM();
6389 v8::HandleScope scope(CcTest::isolate());
6390 Isolate* isolate = CcTest::i_isolate();
6391 Heap* heap = CcTest::heap();
6392 LocalContext context;
6393
6394 heap->CollectAllGarbage();
6395 heap->CollectAllGarbage();
6396
6397 int sfi_count = 0;
6398 {
6399 HeapIterator it(heap);
6400 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6401 if (!obj->IsSharedFunctionInfo()) continue;
6402 sfi_count++;
6403 }
6404 }
6405
6406 {
6407 SharedFunctionInfo::Iterator iterator(isolate);
6408 while (iterator.Next()) sfi_count--;
6409 }
6410
6411 CHECK_EQ(0, sfi_count);
6412}
6413
6414
// Derives a UniqueId for a persistent handle from the address of the heap
// object it points to.
template <typename T>
static UniqueId MakeUniqueId(const Persistent<T>& p) {
  return UniqueId(reinterpret_cast<uintptr_t>(*v8::Utils::OpenPersistent(p)));
}
6419
6420
// Regression test for crbug.com/519319: finalizing incremental marking with
// registered object groups while sweeping from a previous GC is still in
// progress must not crash. The exact statement order below reproduces the
// original failure, so do not reorder.
TEST(Regress519319) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  Heap* heap = CcTest::heap();
  LocalContext context;

  v8::Persistent<Value> parent;
  v8::Persistent<Value> child;

  parent.Reset(isolate, v8::Object::New(isolate));
  child.Reset(isolate, v8::Object::New(isolate));

  // Fill old space so the following GC has real sweeping work to do.
  SimulateFullSpace(heap->old_space());
  heap->CollectGarbage(OLD_SPACE);
  // Register an object group with a reference from parent's group to child.
  {
    UniqueId id = MakeUniqueId(parent);
    isolate->SetObjectGroupId(parent, id);
    isolate->SetReferenceFromGroup(id, child);
  }
  // The CollectGarbage call above starts sweeper threads.
  // The crash will happen if the following two functions
  // are called before sweeping finishes.
  heap->StartIncrementalMarking();
  heap->FinalizeIncrementalMarkingIfComplete("test");
}
6447
6448
6449HEAP_TEST(TestMemoryReducerSampleJsCalls) {
6450 CcTest::InitializeVM();
6451 v8::HandleScope scope(CcTest::isolate());
6452 Heap* heap = CcTest::heap();
6453 Isolate* isolate = CcTest::i_isolate();
6454 MemoryReducer* memory_reducer = heap->memory_reducer_;
6455 memory_reducer->SampleAndGetJsCallsPerMs(0);
6456 isolate->IncrementJsCallsFromApiCounter();
6457 isolate->IncrementJsCallsFromApiCounter();
6458 isolate->IncrementJsCallsFromApiCounter();
6459 double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
6460 CheckDoubleEquals(3, calls_per_ms);
6461
6462 calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
6463 CheckDoubleEquals(0, calls_per_ms);
6464
6465 isolate->IncrementJsCallsFromApiCounter();
6466 isolate->IncrementJsCallsFromApiCounter();
6467 isolate->IncrementJsCallsFromApiCounter();
6468 isolate->IncrementJsCallsFromApiCounter();
6469 calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
6470 CheckDoubleEquals(2, calls_per_ms);
6471}
6472
6473
6474} // namespace internal
6475} // namespace v8