// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>
#include <utility>

#include "src/compilation-cache.h"
#include "src/context-measure.h"
#include "src/deoptimizer.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/field-type.h"
#include "src/global-handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/memory-reducer.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/regexp/jsregexp.h"
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/test-feedback-vector.h"


namespace v8 {
namespace internal {

static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}


static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckSmi(Isolate* isolate, int value, const char* string) {
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Object::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
}

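// FindCodeObject maps an arbitrary interior address back to the Code object
// containing it. The helper below probes every pointer-aligned offset inside
// a freshly generated stub and expects each probe to resolve to that stub,
// while an address inside a second, identical stub must not.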
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != *code);
}

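// Note: Handle::is_null() tests whether the handle has a location at all, not
// whether the slot holds a null value, so a handle explicitly created over a
// null Object* still reports !is_null().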
TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(static_cast<Object*>(nullptr), isolate);
  CHECK(!n.is_null());
}


TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // NaN oddball checks.
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs.
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis.
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers.
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}


template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
                           LANE_TYPE other_value) {
  // Check against lane_values, and check that all lanes can be set to
  // other_value without disturbing the other lanes.
  for (int i = 0; i < LANES; i++) {
    CHECK_EQ(lane_values[i], value->get_lane(i));
  }
  for (int i = 0; i < LANES; i++) {
    value->set_lane(i, other_value);  // change the value
    for (int j = 0; j < LANES; j++) {
      if (i != j)
        CHECK_EQ(lane_values[j], value->get_lane(j));
      else
        CHECK_EQ(other_value, value->get_lane(j));
    }
    value->set_lane(i, lane_values[i]);  // restore the lane
  }
  CHECK(value->BooleanValue());  // SIMD values are 'true'.
}


TEST(SimdObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  // Float32x4
  {
    float lanes[4] = {1, 2, 3, 4};
    float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
    float signaling_NaN = std::numeric_limits<float>::signaling_NaN();

    Handle<Float32x4> value = factory->NewFloat32x4(lanes);
    CHECK(value->IsFloat32x4());
    CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);

    // Check special lane values.
    value->set_lane(1, -0.0);
    CHECK_EQ(-0.0f, value->get_lane(1));
    CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
    value->set_lane(2, quiet_NaN);
    CHECK(std::isnan(value->get_lane(2)));
    value->set_lane(3, signaling_NaN);
    CHECK(std::isnan(value->get_lane(3)));

#ifdef OBJECT_PRINT
    // Check value printing.
    {
      value = factory->NewFloat32x4(lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      CHECK_EQ("1, 2, 3, 4", os.str());
    }
    {
      float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
      value = factory->NewFloat32x4(special_lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      // Value printing doesn't preserve signed zeroes.
      CHECK_EQ("0, 0, NaN, NaN", os.str());
    }
#endif  // OBJECT_PRINT
  }
  // Int32x4
  {
    int32_t lanes[4] = {1, 2, 3, 4};

    Handle<Int32x4> value = factory->NewInt32x4(lanes);
    CHECK(value->IsInt32x4());
    CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint32x4
  {
    uint32_t lanes[4] = {1, 2, 3, 4};

    Handle<Uint32x4> value = factory->NewUint32x4(lanes);
    CHECK(value->IsUint32x4());
    CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool32x4
  {
    bool lanes[4] = {true, false, true, false};

    Handle<Bool32x4> value = factory->NewBool32x4(lanes);
    CHECK(value->IsBool32x4());
    CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool32x4Print(os);
    CHECK_EQ("true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int16x8
  {
    int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Int16x8> value = factory->NewInt16x8(lanes);
    CHECK(value->IsInt16x8());
    CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint16x8
  {
    uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Uint16x8> value = factory->NewUint16x8(lanes);
    CHECK(value->IsUint16x8());
    CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool16x8
  {
    bool lanes[8] = {true, false, true, false, true, false, true, false};

    Handle<Bool16x8> value = factory->NewBool16x8(lanes);
    CHECK(value->IsBool16x8());
    CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool16x8Print(os);
    CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int8x16
  {
    int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Int8x16> value = factory->NewInt8x16(lanes);
    CHECK(value->IsInt8x16());
    CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint8x16
  {
    uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Uint8x16> value = factory->NewUint8x16(lanes);
    CHECK(value->IsUint8x16());
    CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool8x16
  {
    bool lanes[16] = {true, false, true, false, true, false, true, false,
                      true, false, true, false, true, false, true, false};

    Handle<Bool8x16> value = factory->NewBool8x16(lanes);
    CHECK(value->IsBool8x16());
    CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool8x16Print(os);
    CHECK_EQ(
        "true, false, true, false, true, false, true, false, true, false, "
        "true, false, true, false, true, false",
        os.str());
#endif  // OBJECT_PRINT
  }
}

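// Background (informal): smis are immediates encoded directly in the tagged
// pointer word, so Smi::FromInt never allocates and the whole
// [kMinValue, kMaxValue] range must round-trip. OBJECT_POINTER_ALIGN rounds a
// requested size up to the heap's object alignment; 24 is already aligned.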
TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}

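// The test below checks the basic rooting contract: objects reachable from
// the global object survive a new-space GC, while objects that were only
// reachable from handles in an already-closed HandleScope may be reclaimed.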
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After GC, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}


static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
}


TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}


TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // After GC, they should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}


static bool WeakPointerCleared = false;

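// Weak callback used by the tests below. MakeWeak registers it together with
// a pair<handle, id> as parameter; when the GC clears the handle, the
// callback verifies the id, records the clearing in WeakPointerCleared and
// resets the handle.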
static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}


TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}


TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage();

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}


TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge does not recognize the weak reference.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats the weak reference properly.
  heap->CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}

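// The BytecodeArray test also covers evacuation: the constant pool is
// allocated in old space on a page that is then forced to be an evacuation
// candidate, so a full GC must move the pool and update the BytecodeArray's
// constant_pool pointer to the new location.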
TEST(BytecodeArray) {
  static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
  static const int kRawBytesSize = sizeof(kRawBytes);
  static const int kFrameSize = 32;
  static const int kParameterCount = 2;

  i::FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);

  SimulateFullSpace(heap->old_space());
  Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
  for (int i = 0; i < 5; i++) {
    Handle<Object> number = factory->NewHeapNumber(i);
    constant_pool->set(i, *number);
  }

  // Allocate and initialize BytecodeArray.
  Handle<BytecodeArray> array = factory->NewBytecodeArray(
      kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);

  CHECK(array->IsBytecodeArray());
  CHECK_EQ(array->length(), kRawBytesSize);
  CHECK_EQ(array->frame_size(), kFrameSize);
  CHECK_EQ(array->parameter_count(), kParameterCount);
  CHECK_EQ(array->constant_pool(), *constant_pool);
  CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
  CHECK_GE(array->address() + array->BytecodeArraySize(),
           array->GetFirstBytecodeAddress() + array->length());
  for (int i = 0; i < kRawBytesSize; i++) {
    CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
    CHECK_EQ(array->get(i), kRawBytes[i]);
  }

  FixedArray* old_constant_pool_address = *constant_pool;

  // Perform a full garbage collection and force the constant pool to be on an
  // evacuation candidate.
  Page* evac_page = Page::FromAddress(constant_pool->address());
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap->CollectAllGarbage();

  // BytecodeArray should survive.
  CHECK_EQ(array->length(), kRawBytesSize);
  CHECK_EQ(array->frame_size(), kFrameSize);
  for (int i = 0; i < kRawBytesSize; i++) {
    CHECK_EQ(array->get(i), kRawBytes[i]);
    CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
  }

  // Constant pool should have been migrated.
  CHECK_EQ(array->constant_pool(), *constant_pool);
  CHECK_NE(array->constant_pool(), old_constant_pool_address);
}


static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};

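// Internalized strings are deduplicated through the isolate's string table:
// internalizing equal character sequences must always yield the very same
// String object, which is what the pointer comparisons below assert.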
static void CheckInternalizedStrings(const char** strings) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    HandleScope scope(isolate);
    Handle<String> a =
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
    CHECK(a->IsInternalizedString());
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
  }
}


TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}


TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}


TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Check for empty.
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // Add first.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // Delete first.
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // Add first and then second.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // Delete first and then second.
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // Add first and then second.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // Delete second and then first.
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // Check that a plain string and the matching internalized string are
  // treated as the same property key.
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // Check the reverse direction: set via the internalized string, look up
  // via a plain string.
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}


TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property.
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check that the map has changed.
  CHECK(*initial_map != obj->map());
}

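// JSArray note: while the length fits in a smi the array keeps fast
// (FixedArray-backed) elements; raising the length beyond Smi::kMaxValue
// forces a transition to dictionary (slow) elements, as checked below.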
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set the array length to a value larger than fits in a smi.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}


TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
  JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
  JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();

  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}

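// Note: the byte sequence 0xE5 0xA4 0xA7 used below is the three-byte UTF-8
// encoding of a single CJK code point (U+5927), so a buffer of 3 * length
// bytes must decode to a string of exactly `length` characters.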
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}


static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan the heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE.
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE.
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}

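// Code-flushing background (informal): unoptimized code "ages" with each full
// GC, and once it has reached the old-age limit (approximated by the
// kAgingThreshold loops below) the mark-compactor may detach the code from
// its SharedFunctionInfo; calling the function afterwards triggers lazy
// recompilation.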
UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache.
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}


TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function twice more, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GCs now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo should no longer be in the compilation cache.
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  EnableDebugger(CcTest::isolate());
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
  DisableDebugger(CcTest::isolate());

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}

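// The next test exercises the incremental write barrier: when the CompileLazy
// builtin installs already-optimized code into a closure while incremental
// marking is in progress, that store must be recorded so marking stays
// consistent.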
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate->global_object(), f_name).ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  // TODO(mvstanton): change to check that g is *not* compiled when the
  // optimized cache map lookup moves to the compile lazy builtin.
  CHECK(g_function->is_compiled());

  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}

1564TEST(CompilationCacheCachingBehavior) {
1565 // If we do not flush code, or have the compilation cache turned off, this
1566 // test is invalid.
1567 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1568 return;
1569 }
1570 CcTest::InitializeVM();
1571 Isolate* isolate = CcTest::i_isolate();
1572 Factory* factory = isolate->factory();
1573 Heap* heap = isolate->heap();
1574 CompilationCache* compilation_cache = isolate->compilation_cache();
Ben Murdochda12d292016-06-02 14:46:10 +01001575 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001576
1577 v8::HandleScope scope(CcTest::isolate());
1578 const char* raw_source =
1579 "function foo() {"
1580 " var x = 42;"
1581 " var y = 42;"
1582 " var z = x + y;"
1583 "};"
1584 "foo()";
1585 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1586 Handle<Context> native_context = isolate->native_context();
1587
1588 {
1589 v8::HandleScope scope(CcTest::isolate());
1590 CompileRun(raw_source);
1591 }
1592
1593 // On first compilation, only a hash is inserted in the code cache. We can't
1594 // find that value.
1595 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1596 source, Handle<Object>(), 0, 0,
1597 v8::ScriptOriginOptions(false, true, false), native_context,
1598 language_mode);
1599 CHECK(info.is_null());
1600
1601 {
1602 v8::HandleScope scope(CcTest::isolate());
1603 CompileRun(raw_source);
1604 }
1605
1606 // On second compilation, the hash is replaced by a real cache entry mapping
1607 // the source to the shared function info containing the code.
1608 info = compilation_cache->LookupScript(
1609 source, Handle<Object>(), 0, 0,
1610 v8::ScriptOriginOptions(false, true, false), native_context,
1611 language_mode);
1612 CHECK(!info.is_null());
1613
1614  // Check that the code cache entry survives at least one GC.
1615 // (Unless --optimize-for-size, in which case it might get collected
1616 // immediately.)
1617 if (!FLAG_optimize_for_size) {
1618 heap->CollectAllGarbage();
1619 info = compilation_cache->LookupScript(
1620 source, Handle<Object>(), 0, 0,
1621 v8::ScriptOriginOptions(false, true, false), native_context,
1622 language_mode);
1623 CHECK(!info.is_null());
1624 }
1625
1626 // Progress code age until it's old and ready for GC.
1627 while (!info.ToHandleChecked()->code()->IsOld()) {
1628 // To guarantee progress, we have to MakeOlder with different parities.
1629 // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
1630 // always NO_MARKING_PARITY and the code age only progresses if the parity
1631 // is different.
1632 info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
1633 info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
1634 }
1635
1636 heap->CollectAllGarbage();
1637 // Ensure code aging cleared the entry from the cache.
1638 info = compilation_cache->LookupScript(
1639 source, Handle<Object>(), 0, 0,
1640 v8::ScriptOriginOptions(false, true, false), native_context,
1641 language_mode);
1642 CHECK(info.is_null());
1643
1644 {
1645 v8::HandleScope scope(CcTest::isolate());
1646 CompileRun(raw_source);
1647 }
1648
1649 // On first compilation, only a hash is inserted in the code cache. We can't
1650 // find that value.
1651 info = compilation_cache->LookupScript(
1652 source, Handle<Object>(), 0, 0,
1653 v8::ScriptOriginOptions(false, true, false), native_context,
1654 language_mode);
1655 CHECK(info.is_null());
1656
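  // Age the cache past all of its hash generations; each MarkCompactPrologue
  // call retires one generation, so this also evicts the hash entry inserted
  // by the most recent compilation above.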
1657 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1658 compilation_cache->MarkCompactPrologue();
1659 }
1660
1661 {
1662 v8::HandleScope scope(CcTest::isolate());
1663 CompileRun(raw_source);
1664 }
1665
1666  // The aging above evicted the hash from the first compilation, so the
1667  // recompilation again inserted only a hash and the lookup must fail.
1668 info = compilation_cache->LookupScript(
1669 source, Handle<Object>(), 0, 0,
1670 v8::ScriptOriginOptions(false, true, false), native_context,
1671 language_mode);
1672 CHECK(info.is_null());
1673}
1674
1675
1676static void OptimizeEmptyFunction(const char* name) {
1677 HandleScope scope(CcTest::i_isolate());
1678 EmbeddedVector<char, 256> source;
1679 SNPrintF(source,
1680 "function %s() { return 0; }"
1681 "%s(); %s();"
1682 "%%OptimizeFunctionOnNextCall(%s);"
1683 "%s();",
1684 name, name, name, name, name);
1685 CompileRun(source.start());
1686}
1687
1688
1689// Count the number of native contexts in the weak list of native contexts.
1690int CountNativeContexts() {
1691 int count = 0;
1692 Object* object = CcTest::heap()->native_contexts_list();
1693 while (!object->IsUndefined()) {
1694 count++;
1695 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1696 }
1697 return count;
1698}
1699
1700
1701// Count the number of user functions in the weak list of optimized
1702// functions attached to a native context.
1703static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
1704 int count = 0;
1705 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1706 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1707 while (object->IsJSFunction() &&
1708 !JSFunction::cast(object)->shared()->IsBuiltin()) {
1709 count++;
1710 object = JSFunction::cast(object)->next_function_link();
1711 }
1712 return count;
1713}
1714
1715
1716TEST(TestInternalWeakLists) {
1717 FLAG_always_opt = false;
1718 FLAG_allow_natives_syntax = true;
1719 v8::V8::Initialize();
1720
1721 // Some flags turn Scavenge collections into Mark-sweep collections
1722 // and hence are incompatible with this test case.
1723 if (FLAG_gc_global || FLAG_stress_compaction) return;
1724 FLAG_retain_maps_for_n_gc = 0;
1725
1726 static const int kNumTestContexts = 10;
1727
1728 Isolate* isolate = CcTest::i_isolate();
1729 Heap* heap = isolate->heap();
1730 HandleScope scope(isolate);
1731 v8::Local<v8::Context> ctx[kNumTestContexts];
1732 if (!isolate->use_crankshaft()) return;
1733
1734 CHECK_EQ(0, CountNativeContexts());
1735
1736  // Create a number of global contexts which get linked together.
1737 for (int i = 0; i < kNumTestContexts; i++) {
1738 ctx[i] = v8::Context::New(CcTest::isolate());
1739
1740 // Collect garbage that might have been created by one of the
1741 // installed extensions.
1742 isolate->compilation_cache()->Clear();
1743 heap->CollectAllGarbage();
1744
1745 CHECK_EQ(i + 1, CountNativeContexts());
1746
1747 ctx[i]->Enter();
1748
1749 // Create a handle scope so no function objects get stuck in the outer
1750 // handle scope.
1751 HandleScope scope(isolate);
1752 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1753 OptimizeEmptyFunction("f1");
1754 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1755 OptimizeEmptyFunction("f2");
1756 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1757 OptimizeEmptyFunction("f3");
1758 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1759 OptimizeEmptyFunction("f4");
1760 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1761 OptimizeEmptyFunction("f5");
1762 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1763
1764  // Remove the reference to function f1.
1765 CompileRun("f1=null");
1766
1767 // Scavenge treats these references as strong.
1768 for (int j = 0; j < 10; j++) {
1769 CcTest::heap()->CollectGarbage(NEW_SPACE);
1770 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1771 }
1772
1773 // Mark compact handles the weak references.
1774 isolate->compilation_cache()->Clear();
1775 heap->CollectAllGarbage();
1776 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1777
1778 // Get rid of f3 and f5 in the same way.
1779 CompileRun("f3=null");
1780 for (int j = 0; j < 10; j++) {
1781 CcTest::heap()->CollectGarbage(NEW_SPACE);
1782 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1783 }
1784 CcTest::heap()->CollectAllGarbage();
1785 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1786 CompileRun("f5=null");
1787 for (int j = 0; j < 10; j++) {
1788 CcTest::heap()->CollectGarbage(NEW_SPACE);
1789 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1790 }
1791 CcTest::heap()->CollectAllGarbage();
1792 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1793
1794 ctx[i]->Exit();
1795 }
1796
1797 // Force compilation cache cleanup.
1798 CcTest::heap()->NotifyContextDisposed(true);
1799 CcTest::heap()->CollectAllGarbage();
1800
1801 // Dispose the native contexts one by one.
1802 for (int i = 0; i < kNumTestContexts; i++) {
1803 // TODO(dcarney): is there a better way to do this?
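    // Overwriting the handle's slot with undefined and clearing the local
    // drops the strong reference, so the context is now reachable only
    // through the heap's weak list of native contexts.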
1804 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1805 *unsafe = CcTest::heap()->undefined_value();
1806 ctx[i].Clear();
1807
1808 // Scavenge treats these references as strong.
1809 for (int j = 0; j < 10; j++) {
1810 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1811 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1812 }
1813
1814 // Mark compact handles the weak references.
1815 CcTest::heap()->CollectAllGarbage();
1816 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1817 }
1818
1819 CHECK_EQ(0, CountNativeContexts());
1820}
1821
1822
1823// Count the number of native contexts in the weak list of native contexts
1824// causing a GC after the specified number of elements.
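// Handles (rather than raw Object* pointers) are used for the traversal so
// that a GC triggered mid-iteration cannot invalidate the current element.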
1825static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1826 Heap* heap = isolate->heap();
1827 int count = 0;
1828 Handle<Object> object(heap->native_contexts_list(), isolate);
1829 while (!object->IsUndefined()) {
1830 count++;
1831 if (count == n) heap->CollectAllGarbage();
1832 object =
1833 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1834 isolate);
1835 }
1836 return count;
1837}
1838
1839
1840// Count the number of user functions in the weak list of optimized
1841// functions attached to a native context causing a GC after the
1842// specified number of elements.
1843static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
1844 int n) {
1845 int count = 0;
1846 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1847 Isolate* isolate = icontext->GetIsolate();
1848 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1849 isolate);
1850 while (object->IsJSFunction() &&
1851 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
1852 count++;
1853 if (count == n) isolate->heap()->CollectAllGarbage();
1854 object = Handle<Object>(
1855 Object::cast(JSFunction::cast(*object)->next_function_link()),
1856 isolate);
1857 }
1858 return count;
1859}
1860
1861
1862TEST(TestInternalWeakListsTraverseWithGC) {
1863 FLAG_always_opt = false;
1864 FLAG_allow_natives_syntax = true;
1865 v8::V8::Initialize();
1866
1867 static const int kNumTestContexts = 10;
1868
1869 Isolate* isolate = CcTest::i_isolate();
1870 HandleScope scope(isolate);
1871 v8::Local<v8::Context> ctx[kNumTestContexts];
1872 if (!isolate->use_crankshaft()) return;
1873
1874 CHECK_EQ(0, CountNativeContexts());
1875
1876  // Create a number of contexts and check the length of the weak list both
1877  // with and without GCs while iterating the list.
1878 for (int i = 0; i < kNumTestContexts; i++) {
1879 ctx[i] = v8::Context::New(CcTest::isolate());
1880 CHECK_EQ(i + 1, CountNativeContexts());
1881 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1882 }
1883
1884 ctx[0]->Enter();
1885
1886  // Compile a number of functions and check the length of the weak list of
1887  // optimized functions both with and without GCs while iterating the list.
1888 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1889 OptimizeEmptyFunction("f1");
1890 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
1891 CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1892 OptimizeEmptyFunction("f2");
1893 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
1894 CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1895 OptimizeEmptyFunction("f3");
1896 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
1897 CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1898 OptimizeEmptyFunction("f4");
1899 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
1900 CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1901 OptimizeEmptyFunction("f5");
1902 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
1903 CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1904
1905 ctx[0]->Exit();
1906}
1907
1908
1909TEST(TestSizeOfRegExpCode) {
1910 if (!FLAG_regexp_optimization) return;
1911
1912 v8::V8::Initialize();
1913
1914 Isolate* isolate = CcTest::i_isolate();
1915 HandleScope scope(isolate);
1916
1917 LocalContext context;
1918
1919 // Adjust source below and this check to match
1920  // RegExpImpl::kRegExpTooLargeToOptimize.
1921 CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1922
1923  // Compile a regexp that generates much more code when regexp optimizations are in use.
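  // The source is doubled until it crosses 20 * 1024 characters, i.e. just
  // past kRegExpTooLargeToOptimize, while half_size_reg_exp keeps the last
  // value still below that threshold.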
1924 CompileRun(
1925 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1926 "var half_size_reg_exp;"
1927 "while (reg_exp_source.length < 20 * 1024) {"
1928 " half_size_reg_exp = reg_exp_source;"
1929 " reg_exp_source = reg_exp_source + reg_exp_source;"
1930 "}"
1931 // Flatten string.
1932 "reg_exp_source.match(/f/);");
1933
1934 // Get initial heap size after several full GCs, which will stabilize
1935 // the heap size and return with sweeping finished completely.
1936 CcTest::heap()->CollectAllGarbage();
1937 CcTest::heap()->CollectAllGarbage();
1938 CcTest::heap()->CollectAllGarbage();
1939 CcTest::heap()->CollectAllGarbage();
1940 CcTest::heap()->CollectAllGarbage();
1941 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1942 if (collector->sweeping_in_progress()) {
1943 collector->EnsureSweepingCompleted();
1944 }
1945 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1946
1947 CompileRun("'foo'.match(reg_exp_source);");
1948 CcTest::heap()->CollectAllGarbage();
1949 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1950
1951 CompileRun("'foo'.match(half_size_reg_exp);");
1952 CcTest::heap()->CollectAllGarbage();
1953 int size_with_optimized_regexp =
1954 static_cast<int>(CcTest::heap()->SizeOfObjects());
1955
1956 int size_of_regexp_code = size_with_regexp - initial_size;
1957
1958 // On some platforms the debug-code flag causes huge amounts of regexp code
1959 // to be emitted, breaking this test.
1960 if (!FLAG_debug_code) {
1961 CHECK_LE(size_of_regexp_code, 1 * MB);
1962 }
1963
1964 // Small regexp is half the size, but compiles to more than twice the code
1965 // due to the optimization steps.
1966 CHECK_GE(size_with_optimized_regexp,
1967 size_with_regexp + size_of_regexp_code * 2);
1968}
1969
1970
1971HEAP_TEST(TestSizeOfObjects) {
1972 v8::V8::Initialize();
1973
1974 // Get initial heap size after several full GCs, which will stabilize
1975 // the heap size and return with sweeping finished completely.
1976 CcTest::heap()->CollectAllGarbage();
1977 CcTest::heap()->CollectAllGarbage();
1978 CcTest::heap()->CollectAllGarbage();
1979 CcTest::heap()->CollectAllGarbage();
1980 CcTest::heap()->CollectAllGarbage();
1981 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1982 if (collector->sweeping_in_progress()) {
1983 collector->EnsureSweepingCompleted();
1984 }
1985 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1986
1987 {
1988 // Allocate objects on several different old-space pages so that
1989 // concurrent sweeper threads will be busy sweeping the old space on
1990 // subsequent GC runs.
1991 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1992 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1993 for (int i = 1; i <= 100; i++) {
1994 CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1995 CHECK_EQ(initial_size + i * filler_size,
1996 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1997 }
1998 }
1999
2000 // The heap size should go back to initial size after a full GC, even
2001 // though sweeping didn't finish yet.
2002 CcTest::heap()->CollectAllGarbage();
2003
2004  // Normally sweeping would not be complete here, but there are no guarantees.
2005
2006 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
2007
2008 // Waiting for sweeper threads should not change heap size.
2009 if (collector->sweeping_in_progress()) {
2010 collector->EnsureSweepingCompleted();
2011 }
2012 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
2013}
2014
2015
2016TEST(TestAlignmentCalculations) {
2017 // Maximum fill amounts are consistent.
2018 int maximum_double_misalignment = kDoubleSize - kPointerSize;
2019 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
2020 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
2021 CHECK_EQ(0, max_word_fill);
2022 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
2023 CHECK_EQ(maximum_double_misalignment, max_double_fill);
2024 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
2025 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
2026 int max_simd128_unaligned_fill =
2027 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
2028 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
2029
2030 Address base = static_cast<Address>(NULL);
2031 int fill = 0;
2032
2033 // Word alignment never requires fill.
2034 fill = Heap::GetFillToAlign(base, kWordAligned);
2035 CHECK_EQ(0, fill);
2036 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
2037 CHECK_EQ(0, fill);
2038
2039 // No fill is required when address is double aligned.
2040 fill = Heap::GetFillToAlign(base, kDoubleAligned);
2041 CHECK_EQ(0, fill);
2042 // Fill is required if address is not double aligned.
2043 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
2044 CHECK_EQ(maximum_double_misalignment, fill);
2045 // kDoubleUnaligned has the opposite fill amounts.
2046 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
2047 CHECK_EQ(maximum_double_misalignment, fill);
2048 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
2049 CHECK_EQ(0, fill);
2050
2051 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
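  // Worked example: kSimd128Unaligned requests address % kSimd128Size ==
  // kPointerSize. On 32-bit targets (kPointerSize == 4) the required fill
  // cycles through 12, 8, 4 and 0 bytes as the base advances one word at a
  // time; on 64-bit targets (kPointerSize == 8) only the 8 and 0 cases occur.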
2052 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
2053 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
2054 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
2055 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
2056 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
2057 CHECK_EQ(kPointerSize, fill);
2058 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
2059 CHECK_EQ(0, fill);
2060}
2061
2062
2063static HeapObject* NewSpaceAllocateAligned(int size,
2064 AllocationAlignment alignment) {
2065 Heap* heap = CcTest::heap();
2066 AllocationResult allocation =
2067 heap->new_space()->AllocateRawAligned(size, alignment);
2068 HeapObject* obj = NULL;
2069 allocation.To(&obj);
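  // Overwrite the raw allocation with a filler object so the heap stays
  // iterable; these tests only care about the resulting address.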
2070  heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
2071  return obj;
2072}
2073
2074
2075// Get new space allocation into the desired alignment.
2076static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2077 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2078 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2079 if (fill) {
2080 NewSpaceAllocateAligned(fill + offset, kWordAligned);
2081 }
2082 return *top_addr;
2083}
2084
2085
2086TEST(TestAlignedAllocation) {
2087 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2088 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2089 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2090 Address start;
2091 HeapObject* obj;
2092 HeapObject* filler;
2093 if (double_misalignment) {
2094 // Allocate a pointer sized object that must be double aligned at an
2095 // aligned address.
2096 start = AlignNewSpace(kDoubleAligned, 0);
2097 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2098 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2099 // There is no filler.
2100 CHECK_EQ(kPointerSize, *top_addr - start);
2101
2102 // Allocate a second pointer sized object that must be double aligned at an
2103 // unaligned address.
2104 start = AlignNewSpace(kDoubleAligned, kPointerSize);
2105 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2106 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2107 // There is a filler object before the object.
2108 filler = HeapObject::FromAddress(start);
2109 CHECK(obj != filler && filler->IsFiller() &&
2110 filler->Size() == kPointerSize);
2111 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2112
2113 // Similarly for kDoubleUnaligned.
2114 start = AlignNewSpace(kDoubleUnaligned, 0);
2115 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2116 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2117 CHECK_EQ(kPointerSize, *top_addr - start);
2118 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
2119 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2120 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2121 // There is a filler object before the object.
2122 filler = HeapObject::FromAddress(start);
2123 CHECK(obj != filler && filler->IsFiller() &&
2124 filler->Size() == kPointerSize);
2125 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2126 }
2127
2128 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2129 // on platform.
2130 start = AlignNewSpace(kSimd128Unaligned, 0);
2131 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2132 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2133 // There is no filler.
2134 CHECK_EQ(kPointerSize, *top_addr - start);
2135 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
2136 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2137 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2138 // There is a filler object before the object.
2139 filler = HeapObject::FromAddress(start);
2140 CHECK(obj != filler && filler->IsFiller() &&
2141 filler->Size() == kSimd128Size - kPointerSize);
2142 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
2143
2144 if (double_misalignment) {
2145 // Test the 2 other alignments possible on 32 bit platforms.
2146 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
2147 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2148 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2149 // There is a filler object before the object.
2150 filler = HeapObject::FromAddress(start);
2151 CHECK(obj != filler && filler->IsFiller() &&
2152 filler->Size() == 2 * kPointerSize);
2153 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
2154 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
2155 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2156 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2157 // There is a filler object before the object.
2158 filler = HeapObject::FromAddress(start);
2159 CHECK(obj != filler && filler->IsFiller() &&
2160 filler->Size() == kPointerSize);
2161 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
2162 }
2163}
2164
2165
2166static HeapObject* OldSpaceAllocateAligned(int size,
2167 AllocationAlignment alignment) {
2168 Heap* heap = CcTest::heap();
2169 AllocationResult allocation =
2170 heap->old_space()->AllocateRawAligned(size, alignment);
2171 HeapObject* obj = NULL;
2172 allocation.To(&obj);
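  // As above, turn the raw allocation into a filler to keep the heap
  // iterable.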
2173  heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
2174  return obj;
2175}
2176
2177
2178// Get old space allocation into the desired alignment.
2179static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2180 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2181 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2182 int allocation = fill + offset;
2183 if (allocation) {
2184 OldSpaceAllocateAligned(allocation, kWordAligned);
2185 }
2186 Address top = *top_addr;
2187 // Now force the remaining allocation onto the free list.
2188 CcTest::heap()->old_space()->EmptyAllocationInfo();
2189 return top;
2190}
2191
2192
2193// Test the case where allocation must be done from the free list, so filler
2194// may precede or follow the object.
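// Free-list allocation reserves the requested size plus the maximum possible
// fill up front; whatever padding is not needed ahead of the object is turned
// into a filler behind it, which the checks below verify.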
2195TEST(TestAlignedOverAllocation) {
2196 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2197 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2198 Address start;
2199 HeapObject* obj;
2200 HeapObject* filler1;
2201 HeapObject* filler2;
2202 if (double_misalignment) {
2203 start = AlignOldSpace(kDoubleAligned, 0);
2204 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2205 // The object is aligned, and a filler object is created after.
2206 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2207 filler1 = HeapObject::FromAddress(start + kPointerSize);
2208 CHECK(obj != filler1 && filler1->IsFiller() &&
2209 filler1->Size() == kPointerSize);
2210 // Try the opposite alignment case.
2211 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2212 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2213 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2214 filler1 = HeapObject::FromAddress(start);
2215    CHECK(obj != filler1 && filler1->IsFiller() &&
2216          filler1->Size() == kPointerSize);
2220
2221 // Similarly for kDoubleUnaligned.
2222 start = AlignOldSpace(kDoubleUnaligned, 0);
2223 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2224 // The object is aligned, and a filler object is created after.
2225 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2226 filler1 = HeapObject::FromAddress(start + kPointerSize);
2227 CHECK(obj != filler1 && filler1->IsFiller() &&
2228 filler1->Size() == kPointerSize);
2229 // Try the opposite alignment case.
2230 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2231 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2232 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2233 filler1 = HeapObject::FromAddress(start);
2234 CHECK(obj != filler1 && filler1->IsFiller() &&
2235 filler1->Size() == kPointerSize);
2236 }
2237
2238 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2239 // on platform.
2240 start = AlignOldSpace(kSimd128Unaligned, 0);
2241 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2242 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2243 // There is a filler object after the object.
2244 filler1 = HeapObject::FromAddress(start + kPointerSize);
2245 CHECK(obj != filler1 && filler1->IsFiller() &&
2246 filler1->Size() == kSimd128Size - kPointerSize);
2247 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2248 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2249 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2250 // There is a filler object before the object.
2251 filler1 = HeapObject::FromAddress(start);
2252 CHECK(obj != filler1 && filler1->IsFiller() &&
2253 filler1->Size() == kSimd128Size - kPointerSize);
2254
2255 if (double_misalignment) {
2256 // Test the 2 other alignments possible on 32 bit platforms.
2257 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2258 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2259 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2260 // There are filler objects before and after the object.
2261 filler1 = HeapObject::FromAddress(start);
2262 CHECK(obj != filler1 && filler1->IsFiller() &&
2263 filler1->Size() == 2 * kPointerSize);
2264 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2265 CHECK(obj != filler2 && filler2->IsFiller() &&
2266 filler2->Size() == kPointerSize);
2267 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2268 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2269 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2270 // There are filler objects before and after the object.
2271 filler1 = HeapObject::FromAddress(start);
2272 CHECK(obj != filler1 && filler1->IsFiller() &&
2273 filler1->Size() == kPointerSize);
2274 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2275 CHECK(obj != filler2 && filler2->IsFiller() &&
2276 filler2->Size() == 2 * kPointerSize);
2277 }
2278}
2279
2280
2281TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2282 CcTest::InitializeVM();
2283 HeapIterator iterator(CcTest::heap());
2284 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2285 intptr_t size_of_objects_2 = 0;
2286 for (HeapObject* obj = iterator.next();
2287 obj != NULL;
2288 obj = iterator.next()) {
2289 if (!obj->IsFreeSpace()) {
2290 size_of_objects_2 += obj->Size();
2291 }
2292 }
2293 // Delta must be within 5% of the larger result.
2294 // TODO(gc): Tighten this up by distinguishing between byte
2295 // arrays that are real and those that merely mark free space
2296 // on the heap.
2297 if (size_of_objects_1 > size_of_objects_2) {
2298 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2299 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2300 "Iterator: %" V8_PTR_PREFIX "d, "
2301 "delta: %" V8_PTR_PREFIX "d\n",
2302 size_of_objects_1, size_of_objects_2, delta);
2303 CHECK_GT(size_of_objects_1 / 20, delta);
2304 } else {
2305 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2306 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2307 "Iterator: %" V8_PTR_PREFIX "d, "
2308 "delta: %" V8_PTR_PREFIX "d\n",
2309 size_of_objects_1, size_of_objects_2, delta);
2310 CHECK_GT(size_of_objects_2 / 20, delta);
2311 }
2312}
2313
2314
2315static void FillUpNewSpace(NewSpace* new_space) {
2316 // Fill up new space to the point that it is completely full. Make sure
2317 // that the scavenger does not undo the filling.
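  // AlwaysAllocateScope below keeps the filler allocations themselves from
  // triggering a collection part-way through.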
2318 Heap* heap = new_space->heap();
2319 Isolate* isolate = heap->isolate();
2320 Factory* factory = isolate->factory();
2321 HandleScope scope(isolate);
2322 AlwaysAllocateScope always_allocate(isolate);
2323 intptr_t available = new_space->Capacity() - new_space->Size();
2324 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2325 for (intptr_t i = 0; i < number_of_fillers; i++) {
2326 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2327 }
2328}
2329
2330
2331TEST(GrowAndShrinkNewSpace) {
2332 CcTest::InitializeVM();
2333 Heap* heap = CcTest::heap();
2334 NewSpace* new_space = heap->new_space();
2335
2336  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2337    return;
2338 }
2339
2340 // Explicitly growing should double the space capacity.
2341 intptr_t old_capacity, new_capacity;
2342 old_capacity = new_space->TotalCapacity();
2343 new_space->Grow();
2344 new_capacity = new_space->TotalCapacity();
2345 CHECK(2 * old_capacity == new_capacity);
2346
2347 old_capacity = new_space->TotalCapacity();
2348 FillUpNewSpace(new_space);
2349 new_capacity = new_space->TotalCapacity();
2350 CHECK(old_capacity == new_capacity);
2351
2352 // Explicitly shrinking should not affect space capacity.
2353 old_capacity = new_space->TotalCapacity();
2354 new_space->Shrink();
2355 new_capacity = new_space->TotalCapacity();
2356 CHECK(old_capacity == new_capacity);
2357
2358 // Let the scavenger empty the new space.
2359 heap->CollectGarbage(NEW_SPACE);
2360 CHECK_LE(new_space->Size(), old_capacity);
2361
2362 // Explicitly shrinking should halve the space capacity.
2363 old_capacity = new_space->TotalCapacity();
2364 new_space->Shrink();
2365 new_capacity = new_space->TotalCapacity();
2366 CHECK(old_capacity == 2 * new_capacity);
2367
2368 // Consecutive shrinking should not affect space capacity.
2369 old_capacity = new_space->TotalCapacity();
2370 new_space->Shrink();
2371 new_space->Shrink();
2372 new_space->Shrink();
2373 new_capacity = new_space->TotalCapacity();
2374 CHECK(old_capacity == new_capacity);
2375}
2376
2377
2378TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2379 CcTest::InitializeVM();
2380 Heap* heap = CcTest::heap();
2381  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2382    return;
2383 }
2384
2385 v8::HandleScope scope(CcTest::isolate());
2386 NewSpace* new_space = heap->new_space();
2387 intptr_t old_capacity, new_capacity;
2388 old_capacity = new_space->TotalCapacity();
2389 new_space->Grow();
2390 new_capacity = new_space->TotalCapacity();
2391 CHECK(2 * old_capacity == new_capacity);
2392 FillUpNewSpace(new_space);
2393 heap->CollectAllAvailableGarbage();
2394 new_capacity = new_space->TotalCapacity();
2395 CHECK(old_capacity == new_capacity);
2396}
2397
2398
2399static int NumberOfGlobalObjects() {
2400 int count = 0;
2401 HeapIterator iterator(CcTest::heap());
2402 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2403 if (obj->IsJSGlobalObject()) count++;
2404 }
2405 return count;
2406}
2407
2408
2409// Test that we don't embed maps from foreign contexts into
2410// optimized code.
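// A map from ctx1 embedded in ctx2's optimized code would keep ctx1's native
// context, and with it ctx1's global object, alive. The expected global
// object counts of 2, then 1, then 0 confirm both contexts stay collectable.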
2411TEST(LeakNativeContextViaMap) {
2412 i::FLAG_allow_natives_syntax = true;
2413 v8::Isolate* isolate = CcTest::isolate();
2414 v8::HandleScope outer_scope(isolate);
2415 v8::Persistent<v8::Context> ctx1p;
2416 v8::Persistent<v8::Context> ctx2p;
2417 {
2418 v8::HandleScope scope(isolate);
2419 ctx1p.Reset(isolate, v8::Context::New(isolate));
2420 ctx2p.Reset(isolate, v8::Context::New(isolate));
2421 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2422 }
2423
2424 CcTest::heap()->CollectAllAvailableGarbage();
2425 CHECK_EQ(2, NumberOfGlobalObjects());
2426
2427 {
2428 v8::HandleScope inner_scope(isolate);
2429 CompileRun("var v = {x: 42}");
2430 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2431 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2432 v8::Local<v8::Value> v =
2433 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2434 ctx2->Enter();
2435 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2436 v8::Local<v8::Value> res = CompileRun(
2437 "function f() { return o.x; }"
2438 "for (var i = 0; i < 10; ++i) f();"
2439 "%OptimizeFunctionOnNextCall(f);"
2440 "f();");
2441 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2442 CHECK(ctx2->Global()
2443 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2444 .FromJust());
2445 ctx2->Exit();
2446 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2447 ctx1p.Reset();
2448 isolate->ContextDisposedNotification();
2449 }
2450 CcTest::heap()->CollectAllAvailableGarbage();
2451 CHECK_EQ(1, NumberOfGlobalObjects());
2452 ctx2p.Reset();
2453 CcTest::heap()->CollectAllAvailableGarbage();
2454 CHECK_EQ(0, NumberOfGlobalObjects());
2455}
2456
2457
2458// Test that we don't embed functions from foreign contexts into
2459// optimized code.
2460TEST(LeakNativeContextViaFunction) {
2461 i::FLAG_allow_natives_syntax = true;
2462 v8::Isolate* isolate = CcTest::isolate();
2463 v8::HandleScope outer_scope(isolate);
2464 v8::Persistent<v8::Context> ctx1p;
2465 v8::Persistent<v8::Context> ctx2p;
2466 {
2467 v8::HandleScope scope(isolate);
2468 ctx1p.Reset(isolate, v8::Context::New(isolate));
2469 ctx2p.Reset(isolate, v8::Context::New(isolate));
2470 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2471 }
2472
2473 CcTest::heap()->CollectAllAvailableGarbage();
2474 CHECK_EQ(2, NumberOfGlobalObjects());
2475
2476 {
2477 v8::HandleScope inner_scope(isolate);
2478 CompileRun("var v = function() { return 42; }");
2479 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2480 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2481 v8::Local<v8::Value> v =
2482 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2483 ctx2->Enter();
2484 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2485 v8::Local<v8::Value> res = CompileRun(
2486 "function f(x) { return x(); }"
2487 "for (var i = 0; i < 10; ++i) f(o);"
2488 "%OptimizeFunctionOnNextCall(f);"
2489 "f(o);");
2490 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2491 CHECK(ctx2->Global()
2492 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2493 .FromJust());
2494 ctx2->Exit();
2495 ctx1->Exit();
2496 ctx1p.Reset();
2497 isolate->ContextDisposedNotification();
2498 }
2499 CcTest::heap()->CollectAllAvailableGarbage();
2500 CHECK_EQ(1, NumberOfGlobalObjects());
2501 ctx2p.Reset();
2502 CcTest::heap()->CollectAllAvailableGarbage();
2503 CHECK_EQ(0, NumberOfGlobalObjects());
2504}
2505
2506
2507TEST(LeakNativeContextViaMapKeyed) {
2508 i::FLAG_allow_natives_syntax = true;
2509 v8::Isolate* isolate = CcTest::isolate();
2510 v8::HandleScope outer_scope(isolate);
2511 v8::Persistent<v8::Context> ctx1p;
2512 v8::Persistent<v8::Context> ctx2p;
2513 {
2514 v8::HandleScope scope(isolate);
2515 ctx1p.Reset(isolate, v8::Context::New(isolate));
2516 ctx2p.Reset(isolate, v8::Context::New(isolate));
2517 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2518 }
2519
2520 CcTest::heap()->CollectAllAvailableGarbage();
2521 CHECK_EQ(2, NumberOfGlobalObjects());
2522
2523 {
2524 v8::HandleScope inner_scope(isolate);
2525 CompileRun("var v = [42, 43]");
2526 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2527 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2528 v8::Local<v8::Value> v =
2529 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2530 ctx2->Enter();
2531 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2532 v8::Local<v8::Value> res = CompileRun(
2533 "function f() { return o[0]; }"
2534 "for (var i = 0; i < 10; ++i) f();"
2535 "%OptimizeFunctionOnNextCall(f);"
2536 "f();");
2537 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2538 CHECK(ctx2->Global()
2539 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2540 .FromJust());
2541 ctx2->Exit();
2542 ctx1->Exit();
2543 ctx1p.Reset();
2544 isolate->ContextDisposedNotification();
2545 }
2546 CcTest::heap()->CollectAllAvailableGarbage();
2547 CHECK_EQ(1, NumberOfGlobalObjects());
2548 ctx2p.Reset();
2549 CcTest::heap()->CollectAllAvailableGarbage();
2550 CHECK_EQ(0, NumberOfGlobalObjects());
2551}
2552
2553
2554TEST(LeakNativeContextViaMapProto) {
2555 i::FLAG_allow_natives_syntax = true;
2556 v8::Isolate* isolate = CcTest::isolate();
2557 v8::HandleScope outer_scope(isolate);
2558 v8::Persistent<v8::Context> ctx1p;
2559 v8::Persistent<v8::Context> ctx2p;
2560 {
2561 v8::HandleScope scope(isolate);
2562 ctx1p.Reset(isolate, v8::Context::New(isolate));
2563 ctx2p.Reset(isolate, v8::Context::New(isolate));
2564 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2565 }
2566
2567 CcTest::heap()->CollectAllAvailableGarbage();
2568 CHECK_EQ(2, NumberOfGlobalObjects());
2569
2570 {
2571 v8::HandleScope inner_scope(isolate);
2572 CompileRun("var v = { y: 42}");
2573 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2574 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2575 v8::Local<v8::Value> v =
2576 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2577 ctx2->Enter();
2578 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2579 v8::Local<v8::Value> res = CompileRun(
2580 "function f() {"
2581 " var p = {x: 42};"
2582 " p.__proto__ = o;"
2583 " return p.x;"
2584 "}"
2585 "for (var i = 0; i < 10; ++i) f();"
2586 "%OptimizeFunctionOnNextCall(f);"
2587 "f();");
2588 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2589 CHECK(ctx2->Global()
2590 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2591 .FromJust());
2592 ctx2->Exit();
2593 ctx1->Exit();
2594 ctx1p.Reset();
2595 isolate->ContextDisposedNotification();
2596 }
2597 CcTest::heap()->CollectAllAvailableGarbage();
2598 CHECK_EQ(1, NumberOfGlobalObjects());
2599 ctx2p.Reset();
2600 CcTest::heap()->CollectAllAvailableGarbage();
2601 CHECK_EQ(0, NumberOfGlobalObjects());
2602}
2603
2604
2605TEST(InstanceOfStubWriteBarrier) {
2606 i::FLAG_allow_natives_syntax = true;
2607#ifdef VERIFY_HEAP
2608 i::FLAG_verify_heap = true;
2609#endif
2610
2611 CcTest::InitializeVM();
2612 if (!CcTest::i_isolate()->use_crankshaft()) return;
2613 if (i::FLAG_force_marking_deque_overflows) return;
2614 v8::HandleScope outer_scope(CcTest::isolate());
2615 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2616
2617 {
2618 v8::HandleScope scope(CcTest::isolate());
2619 CompileRun(
2620 "function foo () { }"
2621 "function mkbar () { return new (new Function(\"\")) (); }"
2622 "function f (x) { return (x instanceof foo); }"
2623 "function g () { f(mkbar()); }"
2624 "f(new foo()); f(new foo());"
2625 "%OptimizeFunctionOnNextCall(f);"
2626 "f(new foo()); g();");
2627 }
2628
2629 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2630 marking->Stop();
2631 CcTest::heap()->StartIncrementalMarking();
2632
2633 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2634 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2635 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2636
2637 CHECK(f->IsOptimized());
2638
2639 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2640 !marking->IsStopped()) {
2641 // Discard any pending GC requests otherwise we will get GC when we enter
2642 // code below.
2643 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2644 }
2645
2646 CHECK(marking->IsMarking());
2647
2648 {
2649 v8::HandleScope scope(CcTest::isolate());
2650 v8::Local<v8::Object> global = CcTest::global();
2651 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
2652 global->Get(ctx, v8_str("g")).ToLocalChecked());
2653 g->Call(ctx, global, 0, nullptr).ToLocalChecked();
2654 }
2655
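  // Hurry marking to completion and force a full GC; with --verify-heap
  // enabled above, a write barrier missed by the InstanceOf stub during the
  // call to g() would be caught by heap verification.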
2656 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2657 CcTest::heap()->CollectGarbage(OLD_SPACE);
2658}
2659
2660
2661TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2662 i::FLAG_stress_compaction = false;
2663 i::FLAG_allow_natives_syntax = true;
2664#ifdef VERIFY_HEAP
2665 i::FLAG_verify_heap = true;
2666#endif
2667
2668 CcTest::InitializeVM();
2669 if (!CcTest::i_isolate()->use_crankshaft()) return;
2670 v8::HandleScope outer_scope(CcTest::isolate());
2671 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2672
2673 {
2674 v8::HandleScope scope(CcTest::isolate());
2675 CompileRun(
2676 "function f () {"
2677 " var s = 0;"
2678 " for (var i = 0; i < 100; i++) s += i;"
2679 " return s;"
2680 "}"
2681 "f(); f();"
2682 "%OptimizeFunctionOnNextCall(f);"
2683 "f();");
2684 }
2685 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2686 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2687 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2688 CHECK(f->IsOptimized());
2689
2690 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2691 marking->Stop();
2692 CcTest::heap()->StartIncrementalMarking();
2693 // The following calls will increment CcTest::heap()->global_ic_age().
2694 CcTest::isolate()->ContextDisposedNotification();
2695 SimulateIncrementalMarking(CcTest::heap());
2696 CcTest::heap()->CollectAllGarbage();
2697 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2698 CHECK_EQ(0, f->shared()->opt_count());
2699 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2700}
2701
2702
2703TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2704 i::FLAG_stress_compaction = false;
2705 i::FLAG_allow_natives_syntax = true;
2706#ifdef VERIFY_HEAP
2707 i::FLAG_verify_heap = true;
2708#endif
2709
2710 CcTest::InitializeVM();
2711 if (!CcTest::i_isolate()->use_crankshaft()) return;
2712 v8::HandleScope outer_scope(CcTest::isolate());
2713 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2714
2715 {
2716 v8::HandleScope scope(CcTest::isolate());
2717 CompileRun(
2718 "function f () {"
2719 " var s = 0;"
2720 " for (var i = 0; i < 100; i++) s += i;"
2721 " return s;"
2722 "}"
2723 "f(); f();"
2724 "%OptimizeFunctionOnNextCall(f);"
2725 "f();");
2726 }
2727 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2728 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2729 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2730
2731 CHECK(f->IsOptimized());
2732
2733 CcTest::heap()->incremental_marking()->Stop();
2734
2735 // The following two calls will increment CcTest::heap()->global_ic_age().
2736 CcTest::isolate()->ContextDisposedNotification();
2737 CcTest::heap()->CollectAllGarbage();
2738
2739 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2740 CHECK_EQ(0, f->shared()->opt_count());
2741 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2742}
2743
2744
2745HEAP_TEST(GCFlags) {
2746 CcTest::InitializeVM();
2747 Heap* heap = CcTest::heap();
2748
2749 heap->set_current_gc_flags(Heap::kNoGCFlags);
2750 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2751
2752  // Set the flags to check whether we appropriately reset them after the GC.
2753 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2754 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2755 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2756
2757 MarkCompactCollector* collector = heap->mark_compact_collector();
2758 if (collector->sweeping_in_progress()) {
2759 collector->EnsureSweepingCompleted();
2760 }
2761
2762 IncrementalMarking* marking = heap->incremental_marking();
2763 marking->Stop();
2764 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
2765 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2766
2767 heap->CollectGarbage(NEW_SPACE);
2768 // NewSpace scavenges should not overwrite the flags.
2769 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2770
2771 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2772 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2773}
2774
2775
2776TEST(IdleNotificationFinishMarking) {
2777 i::FLAG_allow_natives_syntax = true;
2778 CcTest::InitializeVM();
2779 SimulateFullSpace(CcTest::heap()->old_space());
2780 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2781 marking->Stop();
2782 CcTest::heap()->StartIncrementalMarking();
2783
2784 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2785
2786  // TODO(hpayer): We cannot write a proper unit test for the heap right now.
2787 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2788 // marking delay counter.
2789
2790 // Perform a huge incremental marking step but don't complete marking.
2791 intptr_t bytes_processed = 0;
2792 do {
2793 bytes_processed =
2794 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2795 IncrementalMarking::FORCE_MARKING,
2796 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2797 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2798 } while (bytes_processed);
2799
2800  // The next invocations of incremental marking are not going to complete
2801  // marking since the completion threshold is not reached.
2802
2803 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2804 i++) {
2805 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2806 IncrementalMarking::FORCE_MARKING,
2807 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2808 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2809 }
2810
2811 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2812
2813 // The next idle notification has to finish incremental marking.
2814 const double kLongIdleTime = 1000.0;
2815 CcTest::isolate()->IdleNotificationDeadline(
2816 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2817 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2818 kLongIdleTime);
2819 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2820}
2821
2822
2823// Test that HAllocateObject will always return an object in new-space.
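// New space is deliberately filled up first so that the inlined allocation
// has to take its slow path; even then the resulting object must be placed
// in new space.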
2824TEST(OptimizedAllocationAlwaysInNewSpace) {
2825 i::FLAG_allow_natives_syntax = true;
2826 CcTest::InitializeVM();
2827 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2828 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2829 v8::HandleScope scope(CcTest::isolate());
2830 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2831 SimulateFullSpace(CcTest::heap()->new_space());
2832 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2833 v8::Local<v8::Value> res = CompileRun(
2834 "function c(x) {"
2835 " this.x = x;"
2836 " for (var i = 0; i < 32; i++) {"
2837 " this['x' + i] = x;"
2838 " }"
2839 "}"
2840 "function f(x) { return new c(x); };"
2841 "f(1); f(2); f(3);"
2842 "%OptimizeFunctionOnNextCall(f);"
2843 "f(4);");
2844
2845 CHECK_EQ(4, res.As<v8::Object>()
2846 ->GetRealNamedProperty(ctx, v8_str("x"))
2847 .ToLocalChecked()
2848 ->Int32Value(ctx)
2849 .FromJust());
2850
2851 i::Handle<JSReceiver> o =
2852 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2853
2854 CHECK(CcTest::heap()->InNewSpace(*o));
2855}
2856
2857
2858TEST(OptimizedPretenuringAllocationFolding) {
2859 i::FLAG_allow_natives_syntax = true;
2860 i::FLAG_expose_gc = true;
2861 CcTest::InitializeVM();
2862 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2863 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2864 v8::HandleScope scope(CcTest::isolate());
2865 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2866  // Grow new space until maximum capacity is reached.
2867 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2868 CcTest::heap()->new_space()->Grow();
2869 }
2870
2871 i::ScopedVector<char> source(1024);
2872 i::SNPrintF(
2873 source,
2874 "var number_elements = %d;"
2875 "var elements = new Array();"
2876 "function f() {"
2877 " for (var i = 0; i < number_elements; i++) {"
2878 " elements[i] = [[{}], [1.1]];"
2879 " }"
2880 " return elements[number_elements-1]"
2881 "};"
2882 "f(); gc();"
2883 "f(); f();"
2884 "%%OptimizeFunctionOnNextCall(f);"
2885 "f();",
2886 AllocationSite::kPretenureMinimumCreated);
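  // The loop allocates the literal kPretenureMinimumCreated times before the
  // explicit gc(), giving the allocation site enough feedback to decide on
  // pretenuring; the optimized code should then allocate directly in old
  // space, which the checks below verify.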
2887
2888 v8::Local<v8::Value> res = CompileRun(source.start());
2889
2890 v8::Local<v8::Value> int_array =
2891 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2892 i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2893 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2894 v8::Local<v8::Value> double_array =
2895 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2896 i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2897 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2898
2899 i::Handle<JSReceiver> o =
2900 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2901 CHECK(CcTest::heap()->InOldSpace(*o));
2902 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2903 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2904 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2905 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2906}
2907
2908
2909TEST(OptimizedPretenuringObjectArrayLiterals) {
2910 i::FLAG_allow_natives_syntax = true;
2911 i::FLAG_expose_gc = true;
2912 CcTest::InitializeVM();
2913 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2914 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2915 v8::HandleScope scope(CcTest::isolate());
2916
2917  // Grow new space until maximum capacity is reached.
2918 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2919 CcTest::heap()->new_space()->Grow();
2920 }
2921
2922 i::ScopedVector<char> source(1024);
2923 i::SNPrintF(
2924 source,
2925 "var number_elements = %d;"
2926 "var elements = new Array(number_elements);"
2927 "function f() {"
2928 " for (var i = 0; i < number_elements; i++) {"
2929 " elements[i] = [{}, {}, {}];"
2930 " }"
2931 " return elements[number_elements - 1];"
2932 "};"
2933 "f(); gc();"
2934 "f(); f();"
2935 "%%OptimizeFunctionOnNextCall(f);"
2936 "f();",
2937 AllocationSite::kPretenureMinimumCreated);
2938
2939 v8::Local<v8::Value> res = CompileRun(source.start());
2940
2941 i::Handle<JSObject> o = Handle<JSObject>::cast(
2942 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2943
2944 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2945 CHECK(CcTest::heap()->InOldSpace(*o));
2946}
2947
2948
2949TEST(OptimizedPretenuringMixedInObjectProperties) {
2950 i::FLAG_allow_natives_syntax = true;
2951 i::FLAG_expose_gc = true;
2952 CcTest::InitializeVM();
2953 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2954 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2955 v8::HandleScope scope(CcTest::isolate());
2956
2957  // Grow new space until maximum capacity is reached.
2958 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2959 CcTest::heap()->new_space()->Grow();
2960 }
2961
2962
2963 i::ScopedVector<char> source(1024);
2964 i::SNPrintF(
2965 source,
2966 "var number_elements = %d;"
2967 "var elements = new Array(number_elements);"
2968 "function f() {"
2969 " for (var i = 0; i < number_elements; i++) {"
2970 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2971 " }"
2972 " return elements[number_elements - 1];"
2973 "};"
2974 "f(); gc();"
2975 "f(); f();"
2976 "%%OptimizeFunctionOnNextCall(f);"
2977 "f();",
2978 AllocationSite::kPretenureMinimumCreated);
2979
2980 v8::Local<v8::Value> res = CompileRun(source.start());
2981
2982 i::Handle<JSObject> o = Handle<JSObject>::cast(
2983 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2984
2985 CHECK(CcTest::heap()->InOldSpace(*o));
2986 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2987 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2988 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
2989 if (!o->IsUnboxedDoubleField(idx2)) {
2990 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2991 } else {
2992 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2993 }
2994
2995 JSObject* inner_object =
2996 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2997 CHECK(CcTest::heap()->InOldSpace(inner_object));
2998 if (!inner_object->IsUnboxedDoubleField(idx1)) {
2999 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
3000 } else {
3001 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
3002 }
3003 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
3004}
3005
3006
3007TEST(OptimizedPretenuringDoubleArrayProperties) {
3008 i::FLAG_allow_natives_syntax = true;
3009 i::FLAG_expose_gc = true;
3010 CcTest::InitializeVM();
3011 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3012 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3013 v8::HandleScope scope(CcTest::isolate());
3014
3015  // Grow new space until maximum capacity is reached.
3016 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3017 CcTest::heap()->new_space()->Grow();
3018 }
3019
3020 i::ScopedVector<char> source(1024);
3021 i::SNPrintF(
3022 source,
3023 "var number_elements = %d;"
3024 "var elements = new Array(number_elements);"
3025 "function f() {"
3026 " for (var i = 0; i < number_elements; i++) {"
3027 " elements[i] = {a: 1.1, b: 2.2};"
3028 " }"
3029 " return elements[i - 1];"
3030 "};"
3031 "f(); gc();"
3032 "f(); f();"
3033 "%%OptimizeFunctionOnNextCall(f);"
3034 "f();",
3035 AllocationSite::kPretenureMinimumCreated);
3036
3037 v8::Local<v8::Value> res = CompileRun(source.start());
3038
3039 i::Handle<JSObject> o = Handle<JSObject>::cast(
3040 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3041
3042 CHECK(CcTest::heap()->InOldSpace(*o));
3043 CHECK(CcTest::heap()->InOldSpace(o->properties()));
3044}
3045
3046
3047TEST(OptimizedPretenuringdoubleArrayLiterals) {
3048 i::FLAG_allow_natives_syntax = true;
3049 i::FLAG_expose_gc = true;
3050 CcTest::InitializeVM();
3051 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3052 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3053 v8::HandleScope scope(CcTest::isolate());
3054
3055  // Grow new space until maximum capacity is reached.
3056 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3057 CcTest::heap()->new_space()->Grow();
3058 }
3059
3060 i::ScopedVector<char> source(1024);
3061 i::SNPrintF(
3062 source,
3063 "var number_elements = %d;"
3064 "var elements = new Array(number_elements);"
3065 "function f() {"
3066 " for (var i = 0; i < number_elements; i++) {"
3067 " elements[i] = [1.1, 2.2, 3.3];"
3068 " }"
3069 " return elements[number_elements - 1];"
3070 "};"
3071 "f(); gc();"
3072 "f(); f();"
3073 "%%OptimizeFunctionOnNextCall(f);"
3074 "f();",
3075 AllocationSite::kPretenureMinimumCreated);
3076
3077 v8::Local<v8::Value> res = CompileRun(source.start());
3078
3079 i::Handle<JSObject> o = Handle<JSObject>::cast(
3080 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3081
3082 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3083 CHECK(CcTest::heap()->InOldSpace(*o));
3084}
3085
3086
3087TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
3088 i::FLAG_allow_natives_syntax = true;
3089 i::FLAG_expose_gc = true;
3090 CcTest::InitializeVM();
3091 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3092 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3093 v8::HandleScope scope(CcTest::isolate());
3094 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3095  // Grow new space until maximum capacity is reached.
3096 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3097 CcTest::heap()->new_space()->Grow();
3098 }
3099
3100 i::ScopedVector<char> source(1024);
3101 i::SNPrintF(
3102 source,
3103 "var number_elements = 100;"
3104 "var elements = new Array(number_elements);"
3105 "function f() {"
3106 " for (var i = 0; i < number_elements; i++) {"
3107 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
3108 " }"
3109 " return elements[number_elements - 1];"
3110 "};"
3111 "f(); gc();"
3112 "f(); f();"
3113 "%%OptimizeFunctionOnNextCall(f);"
3114 "f();");
3115
3116 v8::Local<v8::Value> res = CompileRun(source.start());
3117
3118 v8::Local<v8::Value> int_array =
3119 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
3120 i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
3121 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
3122 v8::Local<v8::Value> double_array =
3123 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
3124 i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
3125 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
3126
3127 Handle<JSObject> o = Handle<JSObject>::cast(
3128 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3129 CHECK(CcTest::heap()->InOldSpace(*o));
3130 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
3131 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
3132 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
3133 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
3134}
3135
3136
3137TEST(OptimizedPretenuringNestedObjectLiterals) {
3138 i::FLAG_allow_natives_syntax = true;
3139 i::FLAG_expose_gc = true;
3140 CcTest::InitializeVM();
3141 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3142 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3143 v8::HandleScope scope(CcTest::isolate());
3144 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3145  // Grow new space until maximum capacity is reached.
3146 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3147 CcTest::heap()->new_space()->Grow();
3148 }
3149
3150 i::ScopedVector<char> source(1024);
3151 i::SNPrintF(
3152 source,
3153 "var number_elements = %d;"
3154 "var elements = new Array(number_elements);"
3155 "function f() {"
3156 " for (var i = 0; i < number_elements; i++) {"
3157 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3158 " }"
3159 " return elements[number_elements - 1];"
3160 "};"
3161 "f(); gc();"
3162 "f(); f();"
3163 "%%OptimizeFunctionOnNextCall(f);"
3164 "f();",
3165 AllocationSite::kPretenureMinimumCreated);
3166
3167 v8::Local<v8::Value> res = CompileRun(source.start());
3168
3169 v8::Local<v8::Value> int_array_1 =
3170 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
3171 Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
3172 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
3173 v8::Local<v8::Value> int_array_2 =
3174 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
3175 Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
3176 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
3177
3178 Handle<JSObject> o = Handle<JSObject>::cast(
3179 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3180 CHECK(CcTest::heap()->InOldSpace(*o));
3181 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3182 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3183 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3184 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
3185}
3186
3187
3188TEST(OptimizedPretenuringNestedDoubleLiterals) {
3189 i::FLAG_allow_natives_syntax = true;
3190 i::FLAG_expose_gc = true;
3191 CcTest::InitializeVM();
3192 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3193 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3194 v8::HandleScope scope(CcTest::isolate());
3195 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3196  // Grow new space until maximum capacity is reached.
3197 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3198 CcTest::heap()->new_space()->Grow();
3199 }
3200
3201 i::ScopedVector<char> source(1024);
3202 i::SNPrintF(
3203 source,
3204 "var number_elements = %d;"
3205 "var elements = new Array(number_elements);"
3206 "function f() {"
3207 " for (var i = 0; i < number_elements; i++) {"
3208 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3209 " }"
3210 " return elements[number_elements - 1];"
3211 "};"
3212 "f(); gc();"
3213 "f(); f();"
3214 "%%OptimizeFunctionOnNextCall(f);"
3215 "f();",
3216 AllocationSite::kPretenureMinimumCreated);
3217
3218 v8::Local<v8::Value> res = CompileRun(source.start());
3219
3220 v8::Local<v8::Value> double_array_1 =
3221 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
3222 i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
3223 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
3224 v8::Local<v8::Value> double_array_2 =
3225 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
3226 i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
3227 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
3228
3229 i::Handle<JSObject> o = Handle<JSObject>::cast(
3230 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3231 CHECK(CcTest::heap()->InOldSpace(*o));
3232 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3233 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3234 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3235 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
3236}
3237
3238
3239// Test regular array literals allocation.
3240TEST(OptimizedAllocationArrayLiterals) {
3241 i::FLAG_allow_natives_syntax = true;
3242 CcTest::InitializeVM();
3243 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3244 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3245 v8::HandleScope scope(CcTest::isolate());
3246 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3247 v8::Local<v8::Value> res = CompileRun(
3248 "function f() {"
3249 " var numbers = new Array(1, 2, 3);"
3250 " numbers[0] = 3.14;"
3251 " return numbers;"
3252 "};"
3253 "f(); f(); f();"
3254 "%OptimizeFunctionOnNextCall(f);"
3255 "f();");
3256 CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
3257 ->Get(ctx, v8_str("0"))
3258 .ToLocalChecked()
3259 ->Int32Value(ctx)
3260 .FromJust());
3261
3262 i::Handle<JSObject> o = Handle<JSObject>::cast(
3263 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3264
3265 CHECK(CcTest::heap()->InNewSpace(o->elements()));
3266}
3267
3268
3269static int CountMapTransitions(Map* map) {
3270 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3271}
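
// Each fresh property name stored on a new instance of F adds one
// transition to F's initial map, so the tests below build N transitions
// with a loop of the following shape (this mirrors AddTransitions below):
//
//   for (int i = 0; i < transitions_count; i++) {
//     EmbeddedVector<char, 64> buffer;
//     SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
//     CompileRun(buffer.start());
//   }
//
// after which CountMapTransitions(root->map()) on a fresh "root = new F"
// reports transitions_count live transitions.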
3272
3273
3274// Test that map transitions are cleared and maps are collected with
3275// incremental marking as well.
3276TEST(Regress1465) {
3277 i::FLAG_stress_compaction = false;
3278 i::FLAG_allow_natives_syntax = true;
3279 i::FLAG_trace_incremental_marking = true;
3280 i::FLAG_retain_maps_for_n_gc = 0;
3281 CcTest::InitializeVM();
3282 v8::HandleScope scope(CcTest::isolate());
3283 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3284 static const int transitions_count = 256;
3285
3286 CompileRun("function F() {}");
3287 {
3288 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3289 for (int i = 0; i < transitions_count; i++) {
3290 EmbeddedVector<char, 64> buffer;
3291 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3292 CompileRun(buffer.start());
3293 }
3294 CompileRun("var root = new F;");
3295 }
3296
3297 i::Handle<JSReceiver> root =
3298 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3299 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
3300
3301 // Count number of live transitions before marking.
3302 int transitions_before = CountMapTransitions(root->map());
3303 CompileRun("%DebugPrint(root);");
3304 CHECK_EQ(transitions_count, transitions_before);
3305
3306 SimulateIncrementalMarking(CcTest::heap());
3307 CcTest::heap()->CollectAllGarbage();
3308
3309 // Count number of live transitions after marking. Note that one transition
3310 // is left, because 'o' still holds an instance of one transition target.
3311 int transitions_after = CountMapTransitions(root->map());
3312 CompileRun("%DebugPrint(root);");
3313 CHECK_EQ(1, transitions_after);
3314}
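
// A hedged note on the flag setup above: --retain-maps-for-n-gc normally
// keeps maps alive for a few GC cycles even when nothing references them;
// setting it to 0 lets the test observe transition clearing on the first
// full GC. The observable effect, in miniature:
//
//   i::FLAG_retain_maps_for_n_gc = 0;
//   SimulateIncrementalMarking(CcTest::heap());
//   CcTest::heap()->CollectAllGarbage();
//   CHECK_EQ(1, CountMapTransitions(root->map()));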
3315
3316
3317#ifdef DEBUG
3318static void AddTransitions(int transitions_count) {
3319 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3320 for (int i = 0; i < transitions_count; i++) {
3321 EmbeddedVector<char, 64> buffer;
3322 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3323 CompileRun(buffer.start());
3324 }
3325}
3326
3327
3328static i::Handle<JSObject> GetByName(const char* name) {
3329 return i::Handle<JSObject>::cast(
3330 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3331 CcTest::global()
3332 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
3333 .ToLocalChecked())));
3334}
3335
3336
3337static void AddPropertyTo(
3338 int gc_count, Handle<JSObject> object, const char* property_name) {
3339 Isolate* isolate = CcTest::i_isolate();
3340 Factory* factory = isolate->factory();
3341 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3342 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3343 i::FLAG_gc_interval = gc_count;
3344 i::FLAG_gc_global = true;
3345 i::FLAG_retain_maps_for_n_gc = 0;
3346 CcTest::heap()->set_allocation_timeout(gc_count);
3347 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
3348}
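
// AddPropertyTo arms two debug-build GC triggers before performing the
// store: --gc-interval=N forces a GC after N allocations, and
// set_allocation_timeout(N) arms the same countdown directly on the heap.
// The intent is that the transition-array reallocation performed by the
// store itself trips the GC. A usage sketch, mirroring the tests below:
//
//   Handle<JSObject> root = GetByName("root");
//   AddPropertyTo(2, root, "funny");  // GC fires while the store runs.
//   CcTest::heap()->CollectGarbage(NEW_SPACE);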
3349
3350
3351TEST(TransitionArrayShrinksDuringAllocToZero) {
3352 i::FLAG_stress_compaction = false;
3353 i::FLAG_allow_natives_syntax = true;
3354 CcTest::InitializeVM();
3355 v8::HandleScope scope(CcTest::isolate());
3356 static const int transitions_count = 10;
3357 CompileRun("function F() { }");
3358 AddTransitions(transitions_count);
3359 CompileRun("var root = new F;");
3360 Handle<JSObject> root = GetByName("root");
3361
3362 // Count number of live transitions before marking.
3363 int transitions_before = CountMapTransitions(root->map());
3364 CHECK_EQ(transitions_count, transitions_before);
3365
3366 // Get rid of o
3367 CompileRun("o = new F;"
3368 "root = new F");
3369 root = GetByName("root");
3370 AddPropertyTo(2, root, "funny");
3371 CcTest::heap()->CollectGarbage(NEW_SPACE);
3372
3373  // Count number of live transitions after marking. All old transition
3374  // targets died, so only the freshly added 'funny' transition remains.
3375 int transitions_after = CountMapTransitions(
3376 Map::cast(root->map()->GetBackPointer()));
3377 CHECK_EQ(1, transitions_after);
3378}
3379
3380
3381TEST(TransitionArrayShrinksDuringAllocToOne) {
3382 i::FLAG_stress_compaction = false;
3383 i::FLAG_allow_natives_syntax = true;
3384 CcTest::InitializeVM();
3385 v8::HandleScope scope(CcTest::isolate());
3386 static const int transitions_count = 10;
3387 CompileRun("function F() {}");
3388 AddTransitions(transitions_count);
3389 CompileRun("var root = new F;");
3390 Handle<JSObject> root = GetByName("root");
3391
3392 // Count number of live transitions before marking.
3393 int transitions_before = CountMapTransitions(root->map());
3394 CHECK_EQ(transitions_count, transitions_before);
3395
3396 root = GetByName("root");
3397 AddPropertyTo(2, root, "funny");
3398 CcTest::heap()->CollectGarbage(NEW_SPACE);
3399
3400  // Count number of live transitions after marking. One transition survives
3401  // via 'o', and the freshly added 'funny' transition makes it two.
3402 int transitions_after = CountMapTransitions(
3403 Map::cast(root->map()->GetBackPointer()));
3404 CHECK_EQ(2, transitions_after);
3405}
3406
3407
3408TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3409 i::FLAG_stress_compaction = false;
3410 i::FLAG_allow_natives_syntax = true;
3411 CcTest::InitializeVM();
3412 v8::HandleScope scope(CcTest::isolate());
3413 static const int transitions_count = 10;
3414 CompileRun("function F() {}");
3415 AddTransitions(transitions_count);
3416 CompileRun("var root = new F;");
3417 Handle<JSObject> root = GetByName("root");
3418
3419 // Count number of live transitions before marking.
3420 int transitions_before = CountMapTransitions(root->map());
3421 CHECK_EQ(transitions_count, transitions_before);
3422
3423 root = GetByName("root");
3424 AddPropertyTo(0, root, "prop9");
3425 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3426
3427  // Count number of live transitions after marking. The added 'prop9' matched
3428  // an existing transition, so exactly one live transition remains.
3429 int transitions_after = CountMapTransitions(
3430 Map::cast(root->map()->GetBackPointer()));
3431 CHECK_EQ(1, transitions_after);
3432}
3433
3434
3435TEST(TransitionArraySimpleToFull) {
3436 i::FLAG_stress_compaction = false;
3437 i::FLAG_allow_natives_syntax = true;
3438 CcTest::InitializeVM();
3439 v8::HandleScope scope(CcTest::isolate());
3440 static const int transitions_count = 1;
3441 CompileRun("function F() {}");
3442 AddTransitions(transitions_count);
3443 CompileRun("var root = new F;");
3444 Handle<JSObject> root = GetByName("root");
3445
3446 // Count number of live transitions before marking.
3447 int transitions_before = CountMapTransitions(root->map());
3448 CHECK_EQ(transitions_count, transitions_before);
3449
3450 CompileRun("o = new F;"
3451 "root = new F");
3452 root = GetByName("root");
3453 CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3454 AddPropertyTo(2, root, "happy");
3455
3456  // Count number of live transitions after marking. The old simple transition
3457  // died with its instances; only the freshly added 'happy' transition remains.
3458 int transitions_after = CountMapTransitions(
3459 Map::cast(root->map()->GetBackPointer()));
3460 CHECK_EQ(1, transitions_after);
3461}
3462#endif // DEBUG
3463
3464
3465TEST(Regress2143a) {
3466 i::FLAG_incremental_marking = true;
3467 CcTest::InitializeVM();
3468 v8::HandleScope scope(CcTest::isolate());
3469
3470 // Prepare a map transition from the root object together with a yet
3471 // untransitioned root object.
3472 CompileRun("var root = new Object;"
3473 "root.foo = 0;"
3474 "root = new Object;");
3475
3476 SimulateIncrementalMarking(CcTest::heap());
3477
3478 // Compile a StoreIC that performs the prepared map transition. This
3479 // will restart incremental marking and should make sure the root is
3480 // marked grey again.
3481 CompileRun("function f(o) {"
3482 " o.foo = 0;"
3483 "}"
3484 "f(new Object);"
3485 "f(root);");
3486
3487 // This bug only triggers with aggressive IC clearing.
3488 CcTest::heap()->AgeInlineCaches();
3489
3490 // Explicitly request GC to perform final marking step and sweeping.
3491 CcTest::heap()->CollectAllGarbage();
3492
3493 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3494 CcTest::global()
3495 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3496 .ToLocalChecked()));
3497
3498 // The root object should be in a sane state.
3499 CHECK(root->IsJSObject());
3500 CHECK(root->map()->IsMap());
3501}
3502
3503
3504TEST(Regress2143b) {
3505 i::FLAG_incremental_marking = true;
3506 i::FLAG_allow_natives_syntax = true;
3507 CcTest::InitializeVM();
3508 v8::HandleScope scope(CcTest::isolate());
3509
3510 // Prepare a map transition from the root object together with a yet
3511 // untransitioned root object.
3512 CompileRun("var root = new Object;"
3513 "root.foo = 0;"
3514 "root = new Object;");
3515
3516 SimulateIncrementalMarking(CcTest::heap());
3517
3518 // Compile an optimized LStoreNamedField that performs the prepared
3519 // map transition. This will restart incremental marking and should
3520 // make sure the root is marked grey again.
3521 CompileRun("function f(o) {"
3522 " o.foo = 0;"
3523 "}"
3524 "f(new Object);"
3525 "f(new Object);"
3526 "%OptimizeFunctionOnNextCall(f);"
3527 "f(root);"
3528 "%DeoptimizeFunction(f);");
3529
3530 // This bug only triggers with aggressive IC clearing.
3531 CcTest::heap()->AgeInlineCaches();
3532
3533 // Explicitly request GC to perform final marking step and sweeping.
3534 CcTest::heap()->CollectAllGarbage();
3535
3536 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3537 CcTest::global()
3538 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3539 .ToLocalChecked()));
3540
3541 // The root object should be in a sane state.
3542 CHECK(root->IsJSObject());
3543 CHECK(root->map()->IsMap());
3544}
3545
3546
3547TEST(ReleaseOverReservedPages) {
3548 if (FLAG_never_compact) return;
3549 i::FLAG_trace_gc = true;
3550 // The optimizer can allocate stuff, messing up the test.
3551 i::FLAG_crankshaft = false;
3552 i::FLAG_always_opt = false;
3553  // Parallel compaction increases fragmentation, depending on how existing
3554 // memory is distributed. Since this is non-deterministic because of
3555 // concurrent sweeping, we disable it for this test.
3556 i::FLAG_parallel_compaction = false;
3557 // Concurrent sweeping adds non determinism, depending on when memory is
3558 // available for further reuse.
3559 i::FLAG_concurrent_sweeping = false;
3560  CcTest::InitializeVM();
3561 Isolate* isolate = CcTest::i_isolate();
3562 Factory* factory = isolate->factory();
3563 Heap* heap = isolate->heap();
3564 v8::HandleScope scope(CcTest::isolate());
3565 static const int number_of_test_pages = 20;
3566
3567 // Prepare many pages with low live-bytes count.
3568 PagedSpace* old_space = heap->old_space();
3569 CHECK_EQ(1, old_space->CountTotalPages());
3570 for (int i = 0; i < number_of_test_pages; i++) {
3571 AlwaysAllocateScope always_allocate(isolate);
3572 SimulateFullSpace(old_space);
3573 factory->NewFixedArray(1, TENURED);
3574 }
3575 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3576
3577  // Triggering one GC will cause a lot of garbage to be discovered, but
3578  // evenly spread across all allocated pages.
3579 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3580 "triggered for preparation");
3581 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3582
3583 // Triggering subsequent GCs should cause at least half of the pages
3584 // to be released to the OS after at most two cycles.
3585 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3586 "triggered by test 1");
3587 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3588 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3589 "triggered by test 2");
3590 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3591
3592 // Triggering a last-resort GC should cause all pages to be released to the
3593 // OS so that other processes can seize the memory. If we get a failure here
3594 // where there are 2 pages left instead of 1, then we should increase the
3595 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3596 // first page should be small in order to reduce memory used when the VM
3597 // boots, but if the 20 small arrays don't fit on the first page then that's
3598 // an indication that it is too small.
3599 heap->CollectAllAvailableGarbage("triggered really hard");
3600 CHECK_EQ(1, old_space->CountTotalPages());
3601}
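
// The page-fragmentation setup above is a reusable pattern: saturate the
// old space so that the next tenured allocation must open a brand-new
// page, leaving each page almost empty. Sketch, using helpers already
// included by this file:
//
//   for (int i = 0; i < number_of_test_pages; i++) {
//     AlwaysAllocateScope always_allocate(isolate);
//     SimulateFullSpace(heap->old_space());
//     factory->NewFixedArray(1, TENURED);  // Lands on a fresh page.
//   }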
3602
3603static int forced_gc_counter = 0;
3604
3605void MockUseCounterCallback(v8::Isolate* isolate,
3606 v8::Isolate::UseCounterFeature feature) {
3607 isolate->GetCurrentContext();
3608 if (feature == v8::Isolate::kForcedGC) {
3609 forced_gc_counter++;
3610 }
3611}
3612
3613
3614TEST(CountForcedGC) {
3615 i::FLAG_expose_gc = true;
3616 CcTest::InitializeVM();
3617 Isolate* isolate = CcTest::i_isolate();
3618 v8::HandleScope scope(CcTest::isolate());
3619
3620 isolate->SetUseCounterCallback(MockUseCounterCallback);
3621
3622 forced_gc_counter = 0;
3623 const char* source = "gc();";
3624 CompileRun(source);
3625 CHECK_GT(forced_gc_counter, 0);
3626}
3627
3628
3629#ifdef OBJECT_PRINT
3630TEST(PrintSharedFunctionInfo) {
3631 CcTest::InitializeVM();
3632 v8::HandleScope scope(CcTest::isolate());
3633 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3634 const char* source = "f = function() { return 987654321; }\n"
3635 "g = function() { return 123456789; }\n";
3636 CompileRun(source);
3637 i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3638 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3639 CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3640
3641 OFStream os(stdout);
3642 g->shared()->Print(os);
3643 os << std::endl;
3644}
3645#endif // OBJECT_PRINT
3646
3647
3648TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3649 if (i::FLAG_always_opt) return;
3650 CcTest::InitializeVM();
3651 v8::HandleScope scope(CcTest::isolate());
3652 v8::Local<v8::Value> fun1, fun2;
3653 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3654 {
3655 CompileRun("function fun() {};");
3656 fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3657 }
3658
3659 {
3660 CompileRun("function fun() {};");
3661 fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3662 }
3663
3664 // Prepare function f that contains type feedback for the two closures.
3665 CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3666 CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3667 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3668
3669 Handle<JSFunction> f = Handle<JSFunction>::cast(
3670 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3671 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3672
3673 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3674 FeedbackVectorHelper feedback_helper(feedback_vector);
3675
3676 int expected_slots = 2;
3677 CHECK_EQ(expected_slots, feedback_helper.slot_count());
3678 int slot1 = 0;
3679 int slot2 = 1;
3680 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
3681 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
3682
3683 SimulateIncrementalMarking(CcTest::heap());
3684 CcTest::heap()->CollectAllGarbage();
3685
3686 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
3687 ->cleared());
3688 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
3689 ->cleared());
3690}
3691
3692
3693static Code* FindFirstIC(Code* code, Code::Kind kind) {
3694 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3695 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3696 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3697 RelocInfo* info = it.rinfo();
3698 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3699 if (target->is_inline_cache_stub() && target->kind() == kind) {
3700 return target;
3701 }
3702 }
3703 return NULL;
3704}
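
// FindFirstIC walks a code object's relocation entries and returns the
// first inline-cache stub of the requested kind, or NULL. A hypothetical
// snippet (f is assumed to be a compiled JSFunction):
//
//   Code* ic = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
//   if (ic != NULL) CHECK(ic->is_inline_cache_stub());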
3705
3706
3707static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3708 InlineCacheState desired_state) {
3709 Handle<TypeFeedbackVector> vector =
3710 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3711 FeedbackVectorHelper helper(vector);
3712 FeedbackVectorSlot slot = helper.slot(slot_index);
3713 if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
3714 LoadICNexus nexus(vector, slot);
3715 CHECK(nexus.StateFromFeedback() == desired_state);
3716 } else {
3717 CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
3718 KeyedLoadICNexus nexus(vector, slot);
3719 CHECK(nexus.StateFromFeedback() == desired_state);
3720 }
3721}
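
// Typical use, as in the incremental-marking tests below: assert the IC
// state both before and after a simulated marking cycle.
//
//   CheckVectorIC(f, 0, MONOMORPHIC);
//   SimulateIncrementalMarking(CcTest::heap());
//   CcTest::heap()->CollectAllGarbage();
//   CheckVectorIC(f, 0, MONOMORPHIC);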
3722
3723
3724static void CheckVectorICCleared(Handle<JSFunction> f, int slot_index) {
3725 Handle<TypeFeedbackVector> vector =
3726 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3727 FeedbackVectorSlot slot(slot_index);
3728 LoadICNexus nexus(vector, slot);
3729 CHECK(IC::IsCleared(&nexus));
3730}
3731
3732
3733TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3734 if (i::FLAG_always_opt) return;
3735 CcTest::InitializeVM();
3736 v8::HandleScope scope(CcTest::isolate());
3737 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3738 // Prepare function f that contains a monomorphic IC for object
3739 // originating from the same native context.
3740 CompileRun(
3741 "function fun() { this.x = 1; };"
3742 "function f(o) { return new o(); } f(fun); f(fun);");
3743 Handle<JSFunction> f = Handle<JSFunction>::cast(
3744 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3745 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3746
3747 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3748 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3749
3750 SimulateIncrementalMarking(CcTest::heap());
3751 CcTest::heap()->CollectAllGarbage();
3752
3753 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3754}
3755
3756
3757TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3758 if (i::FLAG_always_opt) return;
3759 CcTest::InitializeVM();
3760 Isolate* isolate = CcTest::i_isolate();
3761 v8::HandleScope scope(CcTest::isolate());
3762 v8::Local<v8::Value> fun1;
3763 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3764
3765 {
3766 LocalContext env;
3767 CompileRun("function fun() { this.x = 1; };");
3768 fun1 = env->Global()->Get(env.local(), v8_str("fun")).ToLocalChecked();
3769 }
3770
3771 // Prepare function f that contains a monomorphic constructor for object
3772 // originating from a different native context.
3773 CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3774 CompileRun(
3775 "function fun() { this.x = 1; };"
3776 "function f(o) { return new o(); } f(fun1); f(fun1);");
3777 Handle<JSFunction> f = Handle<JSFunction>::cast(
3778 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3779 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3780
3781
3782 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3783 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3784
3785 // Fire context dispose notification.
3786 CcTest::isolate()->ContextDisposedNotification();
3787 SimulateIncrementalMarking(CcTest::heap());
3788 CcTest::heap()->CollectAllGarbage();
3789
3790 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3791 vector->Get(FeedbackVectorSlot(0)));
3792}
3793
3794
3795TEST(IncrementalMarkingPreservesMonomorphicIC) {
3796 if (i::FLAG_always_opt) return;
3797 CcTest::InitializeVM();
3798 v8::HandleScope scope(CcTest::isolate());
3799 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3800 // Prepare function f that contains a monomorphic IC for object
3801 // originating from the same native context.
3802 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3803 "function f(o) { return o.x; } f(obj); f(obj);");
3804 Handle<JSFunction> f = Handle<JSFunction>::cast(
3805 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3806 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3807
3808 CheckVectorIC(f, 0, MONOMORPHIC);
3809
3810 SimulateIncrementalMarking(CcTest::heap());
3811 CcTest::heap()->CollectAllGarbage();
3812
3813 CheckVectorIC(f, 0, MONOMORPHIC);
3814}
3815
3816
3817TEST(IncrementalMarkingClearsMonomorphicIC) {
3818 if (i::FLAG_always_opt) return;
3819 CcTest::InitializeVM();
3820 v8::HandleScope scope(CcTest::isolate());
3821 v8::Local<v8::Value> obj1;
3822 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3823
3824 {
3825 LocalContext env;
3826 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3827 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3828 }
3829
3830 // Prepare function f that contains a monomorphic IC for object
3831 // originating from a different native context.
3832 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3833 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3834 Handle<JSFunction> f = Handle<JSFunction>::cast(
3835 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3836 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3837
3838 CheckVectorIC(f, 0, MONOMORPHIC);
3839
3840 // Fire context dispose notification.
3841 CcTest::isolate()->ContextDisposedNotification();
3842 SimulateIncrementalMarking(CcTest::heap());
3843 CcTest::heap()->CollectAllGarbage();
3844
3845 CheckVectorICCleared(f, 0);
3846}
3847
3848
3849TEST(IncrementalMarkingPreservesPolymorphicIC) {
3850 if (i::FLAG_always_opt) return;
3851 CcTest::InitializeVM();
3852 v8::HandleScope scope(CcTest::isolate());
3853 v8::Local<v8::Value> obj1, obj2;
3854 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3855
3856 {
3857 LocalContext env;
3858 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3859 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3860 }
3861
3862 {
3863 LocalContext env;
3864 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3865 obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3866 }
3867
3868 // Prepare function f that contains a polymorphic IC for objects
3869 // originating from two different native contexts.
3870 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3871 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3872 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3873 Handle<JSFunction> f = Handle<JSFunction>::cast(
3874 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3875 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3876
3877 CheckVectorIC(f, 0, POLYMORPHIC);
3878
3879 // Fire context dispose notification.
3880 SimulateIncrementalMarking(CcTest::heap());
3881 CcTest::heap()->CollectAllGarbage();
3882
3883 CheckVectorIC(f, 0, POLYMORPHIC);
3884}
3885
3886
3887TEST(IncrementalMarkingClearsPolymorphicIC) {
3888 if (i::FLAG_always_opt) return;
3889 CcTest::InitializeVM();
3890 v8::HandleScope scope(CcTest::isolate());
3891 v8::Local<v8::Value> obj1, obj2;
3892 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3893
3894 {
3895 LocalContext env;
3896 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3897 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3898 }
3899
3900 {
3901 LocalContext env;
3902 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3903 obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3904 }
3905
3906 // Prepare function f that contains a polymorphic IC for objects
3907 // originating from two different native contexts.
3908 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3909 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3910 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3911 Handle<JSFunction> f = Handle<JSFunction>::cast(
3912 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3913 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3914
3915 CheckVectorIC(f, 0, POLYMORPHIC);
3916
3917 // Fire context dispose notification.
3918 CcTest::isolate()->ContextDisposedNotification();
3919 SimulateIncrementalMarking(CcTest::heap());
3920 CcTest::heap()->CollectAllGarbage();
3921
3922 CheckVectorICCleared(f, 0);
3923}
3924
3925
3926class SourceResource : public v8::String::ExternalOneByteStringResource {
3927 public:
3928 explicit SourceResource(const char* data)
3929 : data_(data), length_(strlen(data)) { }
3930
3931 virtual void Dispose() {
3932 i::DeleteArray(data_);
3933 data_ = NULL;
3934 }
3935
3936 const char* data() const { return data_; }
3937
3938 size_t length() const { return length_; }
3939
3940 bool IsDisposed() { return data_ == NULL; }
3941
3942 private:
3943 const char* data_;
3944 size_t length_;
3945};
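
// A hedged usage sketch: wrap a C string in a SourceResource, hand it to
// V8 as an external one-byte string, and observe Dispose() firing only
// once the string is collected. ReleaseStackTraceDataTest below automates
// exactly this.
//
//   SourceResource* resource = new SourceResource(i::StrDup(source));
//   v8::Local<v8::String> source_string =
//       v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
//   CHECK(!resource->IsDisposed());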
3946
3947
3948void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3949 const char* accessor) {
3950 // Test that the data retained by the Error.stack accessor is released
3951  // after the first time the accessor is fired. We use an external string
3952  // to check whether the data is being released, since the external string
3953  // resource's callback is fired when the external string is GC'ed.
3954 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3955 v8::HandleScope scope(isolate);
3956 SourceResource* resource = new SourceResource(i::StrDup(source));
3957 {
3958 v8::HandleScope scope(isolate);
3959 v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3960 v8::Local<v8::String> source_string =
3961 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3962 i_isolate->heap()->CollectAllAvailableGarbage();
3963 v8::Script::Compile(ctx, source_string)
3964 .ToLocalChecked()
3965 ->Run(ctx)
3966 .ToLocalChecked();
3967 CHECK(!resource->IsDisposed());
3968 }
3969 // i_isolate->heap()->CollectAllAvailableGarbage();
3970 CHECK(!resource->IsDisposed());
3971
3972 CompileRun(accessor);
3973 i_isolate->heap()->CollectAllAvailableGarbage();
3974
3975 // External source has been released.
3976 CHECK(resource->IsDisposed());
3977 delete resource;
3978}
3979
3980
3981UNINITIALIZED_TEST(ReleaseStackTraceData) {
3982 if (i::FLAG_always_opt) {
3983 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3984 // See: https://codereview.chromium.org/181833004/
3985 return;
3986 }
3987 FLAG_use_ic = false; // ICs retain objects.
3988 FLAG_concurrent_recompilation = false;
3989 v8::Isolate::CreateParams create_params;
3990 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
3991 v8::Isolate* isolate = v8::Isolate::New(create_params);
3992 {
3993 v8::Isolate::Scope isolate_scope(isolate);
3994 v8::HandleScope handle_scope(isolate);
3995 v8::Context::New(isolate)->Enter();
3996 static const char* source1 = "var error = null; "
3997 /* Normal Error */ "try { "
3998 " throw new Error(); "
3999 "} catch (e) { "
4000 " error = e; "
4001 "} ";
4002 static const char* source2 = "var error = null; "
4003 /* Stack overflow */ "try { "
4004 " (function f() { f(); })(); "
4005 "} catch (e) { "
4006 " error = e; "
4007 "} ";
4008 static const char* source3 = "var error = null; "
4009 /* Normal Error */ "try { "
4010 /* as prototype */ " throw new Error(); "
4011 "} catch (e) { "
4012 " error = {}; "
4013 " error.__proto__ = e; "
4014 "} ";
4015 static const char* source4 = "var error = null; "
4016 /* Stack overflow */ "try { "
4017 /* as prototype */ " (function f() { f(); })(); "
4018 "} catch (e) { "
4019 " error = {}; "
4020 " error.__proto__ = e; "
4021 "} ";
4022 static const char* getter = "error.stack";
4023 static const char* setter = "error.stack = 0";
4024
4025 ReleaseStackTraceDataTest(isolate, source1, setter);
4026 ReleaseStackTraceDataTest(isolate, source2, setter);
4027 // We do not test source3 and source4 with setter, since the setter is
4028  // supposed to (atypically) write to the receiver, not the holder. This is
4029 // to emulate the behavior of a data property.
4030
4031 ReleaseStackTraceDataTest(isolate, source1, getter);
4032 ReleaseStackTraceDataTest(isolate, source2, getter);
4033 ReleaseStackTraceDataTest(isolate, source3, getter);
4034 ReleaseStackTraceDataTest(isolate, source4, getter);
4035 }
4036 isolate->Dispose();
4037}
4038
4039
4040TEST(Regress159140) {
4041 i::FLAG_allow_natives_syntax = true;
4042 CcTest::InitializeVM();
4043 Isolate* isolate = CcTest::i_isolate();
4044 LocalContext env;
4045 Heap* heap = isolate->heap();
4046 HandleScope scope(isolate);
4047
4048 // Perform one initial GC to enable code flushing.
4049 heap->CollectAllGarbage();
4050
4051 // Prepare several closures that are all eligible for code flushing
4052 // because all reachable ones are not optimized. Make sure that the
4053 // optimized code object is directly reachable through a handle so
4054 // that it is marked black during incremental marking.
4055 Handle<Code> code;
4056 {
4057 HandleScope inner_scope(isolate);
4058 CompileRun("function h(x) {}"
4059 "function mkClosure() {"
4060 " return function(x) { return x + 1; };"
4061 "}"
4062 "var f = mkClosure();"
4063 "var g = mkClosure();"
4064 "f(1); f(2);"
4065 "g(1); g(2);"
4066 "h(1); h(2);"
4067 "%OptimizeFunctionOnNextCall(f); f(3);"
4068 "%OptimizeFunctionOnNextCall(h); h(3);");
4069
4070 Handle<JSFunction> f = Handle<JSFunction>::cast(
4071 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4072 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4073 CHECK(f->is_compiled());
4074 CompileRun("f = null;");
4075
4076 Handle<JSFunction> g = Handle<JSFunction>::cast(
4077 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4078 CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
4079 CHECK(g->is_compiled());
4080 const int kAgingThreshold = 6;
4081 for (int i = 0; i < kAgingThreshold; i++) {
4082 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4083 }
4084
4085 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4086 }
4087
4088 // Simulate incremental marking so that the functions are enqueued as
4089 // code flushing candidates. Then optimize one function. Finally
4090 // finish the GC to complete code flushing.
4091 SimulateIncrementalMarking(heap);
4092 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4093 heap->CollectAllGarbage();
4094
4095 // Unoptimized code is missing and the deoptimizer will go ballistic.
4096 CompileRun("g('bozo');");
4097}
4098
4099
4100TEST(Regress165495) {
4101 i::FLAG_allow_natives_syntax = true;
4102 CcTest::InitializeVM();
4103 Isolate* isolate = CcTest::i_isolate();
4104 Heap* heap = isolate->heap();
4105 HandleScope scope(isolate);
4106
4107 // Perform one initial GC to enable code flushing.
4108 heap->CollectAllGarbage();
4109
4110  // Prepare an optimized closure so that the optimized code map gets
4111  // populated. Then age the unoptimized code to trigger code flushing,
4112  // but make sure the optimized code is unreachable.
4113 {
4114 HandleScope inner_scope(isolate);
4115 LocalContext env;
4116 CompileRun("function mkClosure() {"
4117 " return function(x) { return x + 1; };"
4118 "}"
4119 "var f = mkClosure();"
4120 "f(1); f(2);"
4121 "%OptimizeFunctionOnNextCall(f); f(3);");
4122
4123 Handle<JSFunction> f = Handle<JSFunction>::cast(
4124 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4125 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4126 CHECK(f->is_compiled());
4127 const int kAgingThreshold = 6;
4128 for (int i = 0; i < kAgingThreshold; i++) {
4129 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4130 }
4131
4132 CompileRun("f = null;");
4133 }
4134
4135 // Simulate incremental marking so that unoptimized code is flushed
4136 // even though it still is cached in the optimized code map.
4137 SimulateIncrementalMarking(heap);
4138 heap->CollectAllGarbage();
4139
4140 // Make a new closure that will get code installed from the code map.
4141 // Unoptimized code is missing and the deoptimizer will go ballistic.
4142 CompileRun("var g = mkClosure(); g('bozo');");
4143}
4144
4145
4146TEST(Regress169209) {
4147 i::FLAG_stress_compaction = false;
4148 i::FLAG_allow_natives_syntax = true;
4149
4150 CcTest::InitializeVM();
4151 Isolate* isolate = CcTest::i_isolate();
4152 Heap* heap = isolate->heap();
4153 HandleScope scope(isolate);
4154
4155 // Perform one initial GC to enable code flushing.
4156 heap->CollectAllGarbage();
4157
4158 // Prepare a shared function info eligible for code flushing for which
4159 // the unoptimized code will be replaced during optimization.
4160 Handle<SharedFunctionInfo> shared1;
4161 {
4162 HandleScope inner_scope(isolate);
4163 LocalContext env;
4164 CompileRun("function f() { return 'foobar'; }"
4165 "function g(x) { if (x) f(); }"
4166 "f();"
4167 "g(false);"
4168 "g(false);");
4169
4170 Handle<JSFunction> f = Handle<JSFunction>::cast(
4171 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4172 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4173 CHECK(f->is_compiled());
4174 const int kAgingThreshold = 6;
4175 for (int i = 0; i < kAgingThreshold; i++) {
4176 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4177 }
4178
4179 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4180 }
4181
4182 // Prepare a shared function info eligible for code flushing that will
4183 // represent the dangling tail of the candidate list.
4184 Handle<SharedFunctionInfo> shared2;
4185 {
4186 HandleScope inner_scope(isolate);
4187 LocalContext env;
4188 CompileRun("function flushMe() { return 0; }"
4189 "flushMe(1);");
4190
4191 Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4192 *v8::Local<v8::Function>::Cast(CcTest::global()
4193 ->Get(env.local(), v8_str("flushMe"))
4194 .ToLocalChecked())));
4195 CHECK(f->is_compiled());
4196 const int kAgingThreshold = 6;
4197 for (int i = 0; i < kAgingThreshold; i++) {
4198 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4199 }
4200
4201 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4202 }
4203
4204 // Simulate incremental marking and collect code flushing candidates.
4205 SimulateIncrementalMarking(heap);
4206 CHECK(shared1->code()->gc_metadata() != NULL);
4207
4208 // Optimize function and make sure the unoptimized code is replaced.
4209  CompileRun("%OptimizeFunctionOnNextCall(g);"
4210 "g(false);");
4211
4212 // Finish garbage collection cycle.
4213 heap->CollectAllGarbage();
4214 CHECK(shared1->code()->gc_metadata() == NULL);
4215}
4216
4217
4218TEST(Regress169928) {
4219 i::FLAG_allow_natives_syntax = true;
4220 i::FLAG_crankshaft = false;
4221 CcTest::InitializeVM();
4222 Isolate* isolate = CcTest::i_isolate();
4223 LocalContext env;
4224 Factory* factory = isolate->factory();
4225 v8::HandleScope scope(CcTest::isolate());
4226
4227 // Some flags turn Scavenge collections into Mark-sweep collections
4228 // and hence are incompatible with this test case.
4229 if (FLAG_gc_global || FLAG_stress_compaction) return;
4230
4231 // Prepare the environment
4232 CompileRun("function fastliteralcase(literal, value) {"
4233 " literal[0] = value;"
4234 " return literal;"
4235 "}"
4236 "function get_standard_literal() {"
4237 " var literal = [1, 2, 3];"
4238 " return literal;"
4239 "}"
4240 "obj = fastliteralcase(get_standard_literal(), 1);"
4241 "obj = fastliteralcase(get_standard_literal(), 1.5);"
4242 "obj = fastliteralcase(get_standard_literal(), 2);");
4243
4244  // Prepare the heap
4245 v8::Local<v8::String> mote_code_string =
4246 v8_str("fastliteralcase(mote, 2.5);");
4247
4248 v8::Local<v8::String> array_name = v8_str("mote");
4249 CHECK(CcTest::global()
4250 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
4251 .FromJust());
4252
4253 // First make sure we flip spaces
4254 CcTest::heap()->CollectGarbage(NEW_SPACE);
4255
4256 // Allocate the object.
4257 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4258 array_data->set(0, Smi::FromInt(1));
4259 array_data->set(1, Smi::FromInt(2));
4260
4261 AllocateAllButNBytes(CcTest::heap()->new_space(),
4262 JSArray::kSize + AllocationMemento::kSize +
4263 kPointerSize);
4264
4265 Handle<JSArray> array =
4266 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4267
4268 CHECK_EQ(Smi::FromInt(2), array->length());
4269 CHECK(array->HasFastSmiOrObjectElements());
4270
4271  // We need a filler the size of an AllocationMemento object, plus an extra
4272  // pointer-sized fill value.
4273 HeapObject* obj = NULL;
4274 AllocationResult allocation =
4275 CcTest::heap()->new_space()->AllocateRawUnaligned(
4276 AllocationMemento::kSize + kPointerSize);
4277 CHECK(allocation.To(&obj));
4278 Address addr_obj = obj->address();
4279  CcTest::heap()->CreateFillerObjectAt(addr_obj,
4280 AllocationMemento::kSize + kPointerSize,
4281 ClearRecordedSlots::kNo);
4282
4283 // Give the array a name, making sure not to allocate strings.
4284 v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
4285 CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());
4286
4287 // This should crash with a protection violation if we are running a build
4288 // with the bug.
4289 AlwaysAllocateScope aa_scope(isolate);
4290 v8::Script::Compile(env.local(), mote_code_string)
4291 .ToLocalChecked()
4292 ->Run(env.local())
4293 .ToLocalChecked();
4294}
4295
4296
4297#ifdef DEBUG
4298TEST(Regress513507) {
4299 i::FLAG_flush_optimized_code_cache = false;
4300 i::FLAG_allow_natives_syntax = true;
4301 i::FLAG_gc_global = true;
4302 CcTest::InitializeVM();
4303 Isolate* isolate = CcTest::i_isolate();
4304 LocalContext env;
4305 Heap* heap = isolate->heap();
4306 HandleScope scope(isolate);
4307
4308 // Prepare function whose optimized code map we can use.
4309 Handle<SharedFunctionInfo> shared;
4310 {
4311 HandleScope inner_scope(isolate);
4312 CompileRun("function f() { return 1 }"
4313 "f(); %OptimizeFunctionOnNextCall(f); f();");
4314
4315 Handle<JSFunction> f = Handle<JSFunction>::cast(
4316 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4317 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4318 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4319 CompileRun("f = null");
4320 }
4321
4322 // Prepare optimized code that we can use.
4323 Handle<Code> code;
4324 {
4325 HandleScope inner_scope(isolate);
4326 CompileRun("function g() { return 2 }"
4327 "g(); %OptimizeFunctionOnNextCall(g); g();");
4328
4329 Handle<JSFunction> g = Handle<JSFunction>::cast(
4330 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4331 CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
4332 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4333 if (!code->is_optimized_code()) return;
4334 }
4335
4336 Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
4337 Handle<LiteralsArray> lit =
4338 LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
4339 Handle<Context> context(isolate->context());
4340
4341 // Add the new code several times to the optimized code map and also set an
4342 // allocation timeout so that expanding the code map will trigger a GC.
4343 heap->set_allocation_timeout(5);
4344 FLAG_gc_interval = 1000;
4345 for (int i = 0; i < 10; ++i) {
4346 BailoutId id = BailoutId(i);
4347 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4348 }
4349}
4350#endif // DEBUG
4351
4352
4353TEST(Regress514122) {
4354 i::FLAG_flush_optimized_code_cache = false;
4355 i::FLAG_allow_natives_syntax = true;
4356 CcTest::InitializeVM();
4357 Isolate* isolate = CcTest::i_isolate();
4358 LocalContext env;
4359 Heap* heap = isolate->heap();
4360 HandleScope scope(isolate);
4361
4362  // Perform one initial GC to enable code flushing.
4363 CcTest::heap()->CollectAllGarbage();
4364
4365 // Prepare function whose optimized code map we can use.
4366 Handle<SharedFunctionInfo> shared;
4367 {
4368 HandleScope inner_scope(isolate);
4369 CompileRun("function f() { return 1 }"
4370 "f(); %OptimizeFunctionOnNextCall(f); f();");
4371
4372 Handle<JSFunction> f = Handle<JSFunction>::cast(
4373 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4374 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4375 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4376 CompileRun("f = null");
4377 }
4378
4379 // Prepare optimized code that we can use.
4380 Handle<Code> code;
4381 {
4382 HandleScope inner_scope(isolate);
4383 CompileRun("function g() { return 2 }"
4384 "g(); %OptimizeFunctionOnNextCall(g); g();");
4385
4386 Handle<JSFunction> g = Handle<JSFunction>::cast(
4387 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4388 CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
4389 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4390 if (!code->is_optimized_code()) return;
4391 }
4392
4393 Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
4394 Handle<LiteralsArray> lit =
4395 LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
4396 Handle<Context> context(isolate->context());
4397
4398 // Add the code several times to the optimized code map.
4399 for (int i = 0; i < 3; ++i) {
4400 HandleScope inner_scope(isolate);
4401 BailoutId id = BailoutId(i);
4402 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4403 }
4404 shared->optimized_code_map()->Print();
4405
4406 // Add the code with a literals array to be evacuated.
4407 Page* evac_page;
4408 {
4409 HandleScope inner_scope(isolate);
4410 AlwaysAllocateScope always_allocate(isolate);
4411  // Make sure the literal is placed on an old-space evacuation candidate.
4412 SimulateFullSpace(heap->old_space());
4413
4414  // Make sure the number of literals is > 0.
4415 Handle<LiteralsArray> lit =
4416 LiteralsArray::New(isolate, vector, 23, TENURED);
4417
4418 evac_page = Page::FromAddress(lit->address());
4419 BailoutId id = BailoutId(100);
4420 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4421 }
4422
4423  // Heap is ready; force {evac_page} to become an evacuation candidate and
4424  // simulate incremental marking to enqueue the optimized code map.
4425 FLAG_manual_evacuation_candidates_selection = true;
4426 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4427 SimulateIncrementalMarking(heap);
4428
4429 // No matter whether reachable or not, {boomer} is doomed.
4430 Handle<Object> boomer(shared->optimized_code_map(), isolate);
4431
4432 // Add the code several times to the optimized code map. This will leave old
4433 // copies of the optimized code map unreachable but still marked.
4434 for (int i = 3; i < 6; ++i) {
4435 HandleScope inner_scope(isolate);
4436 BailoutId id = BailoutId(i);
4437 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4438 }
4439
4440 // Trigger a GC to flush out the bug.
4441 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
4442 boomer->Print();
4443}
4444
4445
4446TEST(OptimizedCodeMapReuseEntries) {
4447 i::FLAG_flush_optimized_code_cache = false;
4448 i::FLAG_allow_natives_syntax = true;
4449 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
4450 // run this test.
4451 if (i::FLAG_turbo) return;
4452 CcTest::InitializeVM();
4453 v8::Isolate* v8_isolate = CcTest::isolate();
4454 Isolate* isolate = CcTest::i_isolate();
4455 Heap* heap = isolate->heap();
4456 HandleScope scope(isolate);
4457
4458 // Create 3 contexts, allow the 2nd one to be disposed, and verify that
4459 // a 4th context will re-use the weak slots in the optimized code map
4460 // to hold data, rather than expanding the map.
4461 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
4462 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
4463 v8::ScriptCompiler::Source script_source(
4464 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
4465 .ToLocalChecked());
4466 v8::Local<v8::UnboundScript> indep =
4467 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
4468 .ToLocalChecked();
4469 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
4470  // Perform one initial GC to enable code flushing.
4471 heap->CollectAllGarbage();
4472
4473 c1->Enter();
4474 indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
4475 CompileRun(toplevel);
4476
4477 Handle<SharedFunctionInfo> shared;
4478 Handle<JSFunction> foo = Handle<JSFunction>::cast(
4479 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4480 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
4481 CHECK(foo->shared()->is_compiled());
4482 shared = handle(foo->shared());
4483 c1->Exit();
4484
4485 {
4486 HandleScope scope(isolate);
4487 v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
4488 c2->Enter();
4489 indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
4490 CompileRun(toplevel);
4491 c2->Exit();
4492 }
4493
4494 {
4495 HandleScope scope(isolate);
4496 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
4497 c3->Enter();
4498 indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
4499 CompileRun(toplevel);
4500 c3->Exit();
4501
4502  // Now, collect garbage. Context c2 should have no roots to it, and its
4503  // entry in the optimized code map should be free for a new context.
4504 for (int i = 0; i < 4; i++) {
4505 heap->CollectAllGarbage();
4506 }
4507
4508 Handle<FixedArray> optimized_code_map =
4509 handle(shared->optimized_code_map());
4510 // There should be 3 entries in the map.
4511 CHECK_EQ(
4512 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
4513 SharedFunctionInfo::kEntryLength));
4514 // But one of them (formerly for c2) should be cleared.
4515 int cleared_count = 0;
4516 for (int i = SharedFunctionInfo::kEntriesStart;
4517 i < optimized_code_map->length();
4518 i += SharedFunctionInfo::kEntryLength) {
4519 cleared_count +=
4520 WeakCell::cast(
4521 optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
4522 ->cleared()
4523 ? 1
4524 : 0;
4525 }
4526 CHECK_EQ(1, cleared_count);
4527
4528  // Verify that a new context uses the cleared entry rather than creating
4529  // a new optimized code map array.
4531 v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
4532 c4->Enter();
4533 indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
4534 CompileRun(toplevel);
4535 c4->Exit();
4536 CHECK_EQ(*optimized_code_map, shared->optimized_code_map());
4537
4538 // Now each entry is in use.
4539 cleared_count = 0;
4540 for (int i = SharedFunctionInfo::kEntriesStart;
4541 i < optimized_code_map->length();
4542 i += SharedFunctionInfo::kEntryLength) {
4543 cleared_count +=
4544 WeakCell::cast(
4545 optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
4546 ->cleared()
4547 ? 1
4548 : 0;
4549 }
4550 CHECK_EQ(0, cleared_count);
4551 }
4552}
4553
4554
4555TEST(Regress513496) {
4556 i::FLAG_flush_optimized_code_cache = false;
4557 i::FLAG_allow_natives_syntax = true;
4558 CcTest::InitializeVM();
4559 Isolate* isolate = CcTest::i_isolate();
4560 Heap* heap = isolate->heap();
4561 HandleScope scope(isolate);
4562
4563  // Perform one initial GC to enable code flushing.
4564 CcTest::heap()->CollectAllGarbage();
4565
4566  // Prepare an optimized closure containing an inlined function. Then age
4567 // the inlined unoptimized code to trigger code flushing but make sure the
4568 // outer optimized code is kept in the optimized code map.
4569 Handle<SharedFunctionInfo> shared;
4570 {
4571 LocalContext context;
4572 HandleScope inner_scope(isolate);
4573 CompileRun(
4574 "function g(x) { return x + 1 }"
4575 "function mkClosure() {"
4576 " return function(x) { return g(x); };"
4577 "}"
4578 "var f = mkClosure();"
4579 "f(1); f(2);"
4580 "%OptimizeFunctionOnNextCall(f); f(3);");
4581
4582 Handle<JSFunction> g = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4583 *v8::Local<v8::Function>::Cast(CcTest::global()
4584 ->Get(context.local(), v8_str("g"))
4585 .ToLocalChecked())));
4586 CHECK(g->shared()->is_compiled());
4587 const int kAgingThreshold = 6;
4588 for (int i = 0; i < kAgingThreshold; i++) {
4589 g->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4590 }
4591
4592 Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4593 *v8::Local<v8::Function>::Cast(CcTest::global()
4594 ->Get(context.local(), v8_str("f"))
4595 .ToLocalChecked())));
4596 CHECK(f->is_compiled());
4597 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4598 CompileRun("f = null");
4599 }
4600
4601 // Lookup the optimized code and keep it alive.
4602 CodeAndLiterals result = shared->SearchOptimizedCodeMap(
4603 isolate->context()->native_context(), BailoutId::None());
4604 Handle<Code> optimized_code(result.code, isolate);
4605
4606 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
4607 // though the optimized code for 'f' is reachable via the optimized code map.
4608 heap->CollectAllGarbage();
4609
4610 // Make a new closure that will get code installed from the code map.
4611 // Unoptimized code is missing and the deoptimizer will go ballistic.
4612 CompileRun("var h = mkClosure(); h('bozo');");
4613}
4614
4615
4616TEST(LargeObjectSlotRecording) {
4617 FLAG_manual_evacuation_candidates_selection = true;
4618 CcTest::InitializeVM();
4619 Isolate* isolate = CcTest::i_isolate();
4620 Heap* heap = isolate->heap();
4621 HandleScope scope(isolate);
4622
4623 // Create an object on an evacuation candidate.
4624 SimulateFullSpace(heap->old_space());
4625 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4626 Page* evac_page = Page::FromAddress(lit->address());
4627 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4628 FixedArray* old_location = *lit;
4629
4630 // Allocate a large object.
4631 int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
4632 CHECK(size > Page::kMaxRegularHeapObjectSize);
4633 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
4634 CHECK(heap->lo_space()->Contains(*lo));
4635
4636 // Start incremental marking to activate the write barrier.
4637 SimulateIncrementalMarking(heap, false);
4638 heap->incremental_marking()->AdvanceIncrementalMarking(
4639 10000000, IncrementalMarking::IdleStepActions());
4640
4641 // Create references from the large object to the object on the evacuation
4642 // candidate.
4643 const int kStep = size / 10;
4644 for (int i = 0; i < size; i += kStep) {
4645 lo->set(i, *lit);
4646 CHECK(lo->get(i) == old_location);
4647 }
4648
4649 // Move the evacuation candidate object.
4650 CcTest::heap()->CollectAllGarbage();
4651
4652 // Verify that the pointers in the large object got updated.
4653 for (int i = 0; i < size; i += kStep) {
4654 CHECK_EQ(lo->get(i), *lit);
4655 CHECK(lo->get(i) != old_location);
4656 }
4657}
4658
4659
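// A visitor that ignores all pointers; used below to exercise handle-scope
// iteration without touching the handles themselves.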
4660class DummyVisitor : public ObjectVisitor {
4661 public:
4662 void VisitPointers(Object** start, Object** end) override {}
4663};
4664
4665
4666TEST(DeferredHandles) {
4667 CcTest::InitializeVM();
4668 Isolate* isolate = CcTest::i_isolate();
4669 Heap* heap = isolate->heap();
4670 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4671 HandleScopeData* data = isolate->handle_scope_data();
4672 Handle<Object> init(heap->empty_string(), isolate);
4673 while (data->next < data->limit) {
4674 Handle<Object> obj(heap->empty_string(), isolate);
4675 }
4676 // An entire block of handles has been filled.
4677 // Next handle would require a new block.
4678 CHECK(data->next == data->limit);
4679
4680 DeferredHandleScope deferred(isolate);
4681 DummyVisitor visitor;
4682 isolate->handle_scope_implementer()->Iterate(&visitor);
4683 delete deferred.Detach();
4684}
4685
4686
4687TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4688 CcTest::InitializeVM();
4689 v8::HandleScope scope(CcTest::isolate());
4690 CompileRun("function f(n) {"
4691 " var a = new Array(n);"
4692 " for (var i = 0; i < n; i += 100) a[i] = i;"
4693 "};"
4694 "f(10 * 1024 * 1024);");
4695 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4696 if (marking->IsStopped()) {
4697 CcTest::heap()->StartIncrementalMarking();
4698 }
4699 // This big step should be sufficient to mark the whole array.
4700 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4701 CHECK(marking->IsComplete() ||
4702 marking->IsReadyToOverApproximateWeakClosure());
4703}
4704
4705
4706TEST(DisableInlineAllocation) {
4707 i::FLAG_allow_natives_syntax = true;
4708 CcTest::InitializeVM();
4709 v8::HandleScope scope(CcTest::isolate());
4710 CompileRun("function test() {"
4711 " var x = [];"
4712 " for (var i = 0; i < 10; i++) {"
4713 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4714 " }"
4715 "}"
4716 "function run() {"
4717 " %OptimizeFunctionOnNextCall(test);"
4718 " test();"
4719 " %DeoptimizeFunction(test);"
4720 "}");
4721
4722 // Warm up with inline allocation enabled.
4723 CompileRun("test(); test(); run();");
4724
4725 // Run test with inline allocation disabled.
4726 CcTest::heap()->DisableInlineAllocation();
4727 CompileRun("run()");
4728
4729 // Run test with inline allocation re-enabled.
4730 CcTest::heap()->EnableInlineAllocation();
4731 CompileRun("run()");
4732}
4733
4734
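// Walks the heap's weak list of allocation sites and returns its length.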
4735static int AllocationSitesCount(Heap* heap) {
4736 int count = 0;
4737 for (Object* site = heap->allocation_sites_list();
4738 !(site->IsUndefined());
4739 site = AllocationSite::cast(site)->weak_next()) {
4740 count++;
4741 }
4742 return count;
4743}
4744
4745
4746TEST(EnsureAllocationSiteDependentCodesProcessed) {
4747 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4748 i::FLAG_allow_natives_syntax = true;
4749 CcTest::InitializeVM();
4750 Isolate* isolate = CcTest::i_isolate();
4751 v8::internal::Heap* heap = CcTest::heap();
4752 GlobalHandles* global_handles = isolate->global_handles();
4753
4754 if (!isolate->use_crankshaft()) return;
4755
4756 // The allocation site at the head of the list is ours.
4757 Handle<AllocationSite> site;
4758 {
4759 LocalContext context;
4760 v8::HandleScope scope(context->GetIsolate());
4761
4762 int count = AllocationSitesCount(heap);
4763 CompileRun("var bar = function() { return (new Array()); };"
4764 "var a = bar();"
4765 "bar();"
4766 "bar();");
4767
4768 // One allocation site should have been created.
4769 int new_count = AllocationSitesCount(heap);
4770 CHECK_EQ(new_count, (count + 1));
4771 site = Handle<AllocationSite>::cast(
4772 global_handles->Create(
4773 AllocationSite::cast(heap->allocation_sites_list())));
4774
4775 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4776
4777 CHECK_EQ(DependentCode::kAllocationSiteTransitionChangedGroup,
4778 site->dependent_code()->group());
4779 CHECK_EQ(1, site->dependent_code()->count());
4780 CHECK(site->dependent_code()->object_at(0)->IsWeakCell());
4781 Code* function_bar = Code::cast(
4782 WeakCell::cast(site->dependent_code()->object_at(0))->value());
4783 Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
4784 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4785 CcTest::global()
4786 ->Get(context.local(), v8_str("bar"))
4787 .ToLocalChecked())));
4788 CHECK_EQ(bar_handle->code(), function_bar);
4789 }
4790
4791 // Now make sure that a GC gets rid of the function, even though we
4792 // still keep the allocation site alive.
4793 for (int i = 0; i < 4; i++) {
4794 heap->CollectAllGarbage();
4795 }
4796
4797 // The site still exists because of our global handle, but the code is no
4798 // longer referred to by dependent_code().
4799 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
4800 WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
4801}
4802
4803
4804TEST(CellsInOptimizedCodeAreWeak) {
4805 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4806 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4807 i::FLAG_allow_natives_syntax = true;
4808 CcTest::InitializeVM();
4809 Isolate* isolate = CcTest::i_isolate();
4810 v8::internal::Heap* heap = CcTest::heap();
4811
4812 if (!isolate->use_crankshaft()) return;
4813 HandleScope outer_scope(heap->isolate());
4814 Handle<Code> code;
4815 {
4816 LocalContext context;
4817 HandleScope scope(heap->isolate());
4818
4819 CompileRun(
4820 "bar = (function() {"
4821 " function bar() {"
4822 " return foo(1);"
4823 " };"
4824 " var foo = function(x) { with (x) { return 1 + x; } };"
4825 " %NeverOptimizeFunction(foo);"
4826 " bar(foo);"
4827 " bar(foo);"
4828 " bar(foo);"
4829 " %OptimizeFunctionOnNextCall(bar);"
4830 " bar(foo);"
4831 " return bar;})();");
4832
4833 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4834 *v8::Local<v8::Function>::Cast(CcTest::global()
4835 ->Get(context.local(), v8_str("bar"))
4836 .ToLocalChecked())));
4837 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4838 }
4839
4840 // Now make sure that a GC gets rid of the function.
4841 for (int i = 0; i < 4; i++) {
4842 heap->CollectAllGarbage();
4843 }
4844
4845 CHECK(code->marked_for_deoptimization());
4846}
4847
4848
4849TEST(ObjectsInOptimizedCodeAreWeak) {
4850 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4851 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4852 i::FLAG_allow_natives_syntax = true;
4853 CcTest::InitializeVM();
4854 Isolate* isolate = CcTest::i_isolate();
4855 v8::internal::Heap* heap = CcTest::heap();
4856
4857 if (!isolate->use_crankshaft()) return;
4858 HandleScope outer_scope(heap->isolate());
4859 Handle<Code> code;
4860 {
4861 LocalContext context;
4862 HandleScope scope(heap->isolate());
4863
4864 CompileRun(
4865 "function bar() {"
4866 " return foo(1);"
4867 "};"
4868 "function foo(x) { with (x) { return 1 + x; } };"
4869 "%NeverOptimizeFunction(foo);"
4870 "bar();"
4871 "bar();"
4872 "bar();"
4873 "%OptimizeFunctionOnNextCall(bar);"
4874 "bar();");
4875
4876 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4877 *v8::Local<v8::Function>::Cast(CcTest::global()
4878 ->Get(context.local(), v8_str("bar"))
4879 .ToLocalChecked())));
4880 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4881 }
4882
4883 // Now make sure that a GC gets rid of the function.
4884 for (int i = 0; i < 4; i++) {
4885 heap->CollectAllGarbage();
4886 }
4887
4888 CHECK(code->marked_for_deoptimization());
4889}
4890
4891
4892TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4893 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4894 if (!i::FLAG_incremental_marking) return;
4895 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4896 i::FLAG_allow_natives_syntax = true;
4897 i::FLAG_compilation_cache = false;
4898 i::FLAG_retain_maps_for_n_gc = 0;
4899 CcTest::InitializeVM();
4900 Isolate* isolate = CcTest::i_isolate();
4901
4902 // Do not run for no-snap builds.
4903 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
4904
4905 v8::internal::Heap* heap = CcTest::heap();
4906
4907 // Get a clean slate regarding optimized functions on the heap.
4908 i::Deoptimizer::DeoptimizeAll(isolate);
4909 heap->CollectAllGarbage();
4910
4911 if (!isolate->use_crankshaft()) return;
4912 HandleScope outer_scope(heap->isolate());
4913 for (int i = 0; i < 3; i++) {
4914 SimulateIncrementalMarking(heap);
4915 {
4916 LocalContext context;
4917 HandleScope scope(heap->isolate());
4918 EmbeddedVector<char, 256> source;
4919 SNPrintF(source,
4920 "function bar%d() {"
4921 " return foo%d(1);"
4922 "};"
4923 "function foo%d(x) { with (x) { return 1 + x; } };"
4924 "bar%d();"
4925 "bar%d();"
4926 "bar%d();"
4927 "%%OptimizeFunctionOnNextCall(bar%d);"
4928 "bar%d();",
4929 i, i, i, i, i, i, i, i);
4930 CompileRun(source.start());
4931 }
4932 // We have to abort incremental marking here to abandon black pages.
4933 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4934 }
4935 int elements = 0;
4936 if (heap->weak_object_to_code_table()->IsHashTable()) {
4937 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4938 elements = t->NumberOfElements();
4939 }
4940 CHECK_EQ(0, elements);
4941}
4942
4943
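// Defines, warms up, and optimizes a trivial function with the given name,
// returning its JSFunction handle.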
4944static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
4945 const char* name) {
4946 EmbeddedVector<char, 256> source;
4947 SNPrintF(source,
4948 "function %s() { return 0; }"
4949 "%s(); %s();"
4950 "%%OptimizeFunctionOnNextCall(%s);"
4951 "%s();", name, name, name, name, name);
4952 CompileRun(source.start());
4953 i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
4954 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4955 CcTest::global()
4956 ->Get(isolate->GetCurrentContext(), v8_str(name))
4957 .ToLocalChecked())));
4958 return fun;
4959}
4960
4961
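// Counts how many Code objects follow |code| on its next_code_link() chain.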
4962static int GetCodeChainLength(Code* code) {
4963 int result = 0;
4964 while (code->next_code_link()->IsCode()) {
4965 result++;
4966 code = Code::cast(code->next_code_link());
4967 }
4968 return result;
4969}
4970
4971
4972TEST(NextCodeLinkIsWeak) {
4973 i::FLAG_always_opt = false;
4974 i::FLAG_allow_natives_syntax = true;
4975 CcTest::InitializeVM();
4976 Isolate* isolate = CcTest::i_isolate();
4977 v8::internal::Heap* heap = CcTest::heap();
4978
4979 if (!isolate->use_crankshaft()) return;
4980 HandleScope outer_scope(heap->isolate());
4981 Handle<Code> code;
4982 heap->CollectAllAvailableGarbage();
4983 int code_chain_length_before, code_chain_length_after;
4984 {
4985 HandleScope scope(heap->isolate());
4986 Handle<JSFunction> mortal =
4987 OptimizeDummyFunction(CcTest::isolate(), "mortal");
4988 Handle<JSFunction> immortal =
4989 OptimizeDummyFunction(CcTest::isolate(), "immortal");
4990 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4991 code_chain_length_before = GetCodeChainLength(immortal->code());
4992 // Keep the immortal code and let the mortal code die.
4993 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4994 CompileRun("mortal = null; immortal = null;");
4995 }
4996 heap->CollectAllAvailableGarbage();
4997 // Now mortal code should be dead.
4998 code_chain_length_after = GetCodeChainLength(*code);
4999 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
5000}
5001
5002
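// Assembles a minimal code object flagged as OPTIMIZED_FUNCTION so that it
// can be chained into a native context's optimized code list.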
5003static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
5004 i::byte buffer[i::Assembler::kMinimalBufferSize];
5005 MacroAssembler masm(isolate, buffer, sizeof(buffer),
5006 v8::internal::CodeObjectRequired::kYes);
5007 CodeDesc desc;
5008 masm.Push(isolate->factory()->undefined_value());
5009 masm.Drop(1);
5010 masm.GetCode(&desc);
5011 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
5012 Handle<Code> code = isolate->factory()->NewCode(
5013 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
5014 CHECK(code->IsCode());
5015 return code;
5016}
5017
5018
5019TEST(NextCodeLinkIsWeak2) {
5020 i::FLAG_allow_natives_syntax = true;
5021 CcTest::InitializeVM();
5022 Isolate* isolate = CcTest::i_isolate();
5023 v8::internal::Heap* heap = CcTest::heap();
5024
5025 if (!isolate->use_crankshaft()) return;
5026 HandleScope outer_scope(heap->isolate());
5027 heap->CollectAllAvailableGarbage();
5028 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
5029 Handle<Code> new_head;
5030 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5031 {
5032 HandleScope scope(heap->isolate());
5033 Handle<Code> immortal = DummyOptimizedCode(isolate);
5034 Handle<Code> mortal = DummyOptimizedCode(isolate);
5035 mortal->set_next_code_link(*old_head);
5036 immortal->set_next_code_link(*mortal);
5037 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5038 new_head = scope.CloseAndEscape(immortal);
5039 }
5040 heap->CollectAllAvailableGarbage();
5041 // Now mortal code should be dead.
5042 CHECK_EQ(*old_head, new_head->next_code_link());
5043}
5044
5045
5046static bool weak_ic_cleared = false;
5047
5048static void ClearWeakIC(
5049 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5050 printf("ClearWeakIC callback is called\n");
5051 weak_ic_cleared = true;
5052 data.GetParameter()->Reset();
5053}
5054
5055
5056TEST(WeakFunctionInConstructor) {
5057 if (i::FLAG_always_opt) return;
5058 i::FLAG_stress_compaction = false;
5059 CcTest::InitializeVM();
5060 v8::Isolate* isolate = CcTest::isolate();
5061 LocalContext env;
5062 v8::HandleScope scope(isolate);
5063 CompileRun(
5064 "function createObj(obj) {"
5065 " return new obj();"
5066 "}");
5067 i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
5068 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
5069 CcTest::global()
5070 ->Get(env.local(), v8_str("createObj"))
5071 .ToLocalChecked())));
5072
5073 v8::Persistent<v8::Object> garbage;
5074 {
5075 v8::HandleScope scope(isolate);
5076 const char* source =
5077 " (function() {"
5078 " function hat() { this.x = 5; }"
5079 " createObj(hat);"
5080 " createObj(hat);"
5081 " return hat;"
5082 " })();";
5083 garbage.Reset(isolate, CompileRun(env.local(), source)
5084 .ToLocalChecked()
5085 ->ToObject(env.local())
5086 .ToLocalChecked());
5087 }
5088 weak_ic_cleared = false;
5089 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5090 Heap* heap = CcTest::i_isolate()->heap();
5091 heap->CollectAllGarbage();
5092 CHECK(weak_ic_cleared);
5093
5094 // We've determined the constructor in createObj has had its weak cell
5095 // cleared. Now, verify that one additional call with a new function
5096 // re-establishes monomorphism.
5097 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5098 createObj->shared()->feedback_vector(), CcTest::i_isolate());
5099 for (int i = 0; i < 20; i++) {
5100 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5101 CHECK(slot_value->IsWeakCell());
5102 if (WeakCell::cast(slot_value)->cleared()) break;
5103 heap->CollectAllGarbage();
5104 }
5105
5106 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5107 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5108 CompileRun(
5109 "function coat() { this.x = 6; }"
5110 "createObj(coat);");
5111 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5112 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5113}
5114
5115
5116// Checks that the value returned by execution of the source is weak.
5117void CheckWeakness(const char* source) {
5118 i::FLAG_stress_compaction = false;
5119 CcTest::InitializeVM();
5120 v8::Isolate* isolate = CcTest::isolate();
5121 LocalContext env;
5122 v8::HandleScope scope(isolate);
5123 v8::Persistent<v8::Object> garbage;
5124 {
5125 v8::HandleScope scope(isolate);
5126 garbage.Reset(isolate, CompileRun(env.local(), source)
5127 .ToLocalChecked()
5128 ->ToObject(env.local())
5129 .ToLocalChecked());
5130 }
5131 weak_ic_cleared = false;
5132 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5133 Heap* heap = CcTest::i_isolate()->heap();
5134 heap->CollectAllGarbage();
5135 CHECK(weak_ic_cleared);
5136}
5137
5138
5139// Each of the following "weak IC" tests creates an IC that embeds a map with
5140// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5141TEST(WeakMapInMonomorphicLoadIC) {
5142 CheckWeakness("function loadIC(obj) {"
5143 " return obj.name;"
5144 "}"
5145 " (function() {"
5146 " var proto = {'name' : 'weak'};"
5147 " var obj = Object.create(proto);"
5148 " loadIC(obj);"
5149 " loadIC(obj);"
5150 " loadIC(obj);"
5151 " return proto;"
5152 " })();");
5153}
5154
5155
5156TEST(WeakMapInPolymorphicLoadIC) {
5157 CheckWeakness(
5158 "function loadIC(obj) {"
5159 " return obj.name;"
5160 "}"
5161 " (function() {"
5162 " var proto = {'name' : 'weak'};"
5163 " var obj = Object.create(proto);"
5164 " loadIC(obj);"
5165 " loadIC(obj);"
5166 " loadIC(obj);"
5167 " var poly = Object.create(proto);"
5168 " poly.x = true;"
5169 " loadIC(poly);"
5170 " return proto;"
5171 " })();");
5172}
5173
5174
5175TEST(WeakMapInMonomorphicKeyedLoadIC) {
5176 CheckWeakness("function keyedLoadIC(obj, field) {"
5177 " return obj[field];"
5178 "}"
5179 " (function() {"
5180 " var proto = {'name' : 'weak'};"
5181 " var obj = Object.create(proto);"
5182 " keyedLoadIC(obj, 'name');"
5183 " keyedLoadIC(obj, 'name');"
5184 " keyedLoadIC(obj, 'name');"
5185 " return proto;"
5186 " })();");
5187}
5188
5189
5190TEST(WeakMapInPolymorphicKeyedLoadIC) {
5191 CheckWeakness(
5192 "function keyedLoadIC(obj, field) {"
5193 " return obj[field];"
5194 "}"
5195 " (function() {"
5196 " var proto = {'name' : 'weak'};"
5197 " var obj = Object.create(proto);"
5198 " keyedLoadIC(obj, 'name');"
5199 " keyedLoadIC(obj, 'name');"
5200 " keyedLoadIC(obj, 'name');"
5201 " var poly = Object.create(proto);"
5202 " poly.x = true;"
5203 " keyedLoadIC(poly, 'name');"
5204 " return proto;"
5205 " })();");
5206}
5207
5208
5209TEST(WeakMapInMonomorphicStoreIC) {
5210 CheckWeakness("function storeIC(obj, value) {"
5211 " obj.name = value;"
5212 "}"
5213 " (function() {"
5214 " var proto = {'name' : 'weak'};"
5215 " var obj = Object.create(proto);"
5216 " storeIC(obj, 'x');"
5217 " storeIC(obj, 'x');"
5218 " storeIC(obj, 'x');"
5219 " return proto;"
5220 " })();");
5221}
5222
5223
5224TEST(WeakMapInPolymorphicStoreIC) {
5225 CheckWeakness(
5226 "function storeIC(obj, value) {"
5227 " obj.name = value;"
5228 "}"
5229 " (function() {"
5230 " var proto = {'name' : 'weak'};"
5231 " var obj = Object.create(proto);"
5232 " storeIC(obj, 'x');"
5233 " storeIC(obj, 'x');"
5234 " storeIC(obj, 'x');"
5235 " var poly = Object.create(proto);"
5236 " poly.x = true;"
5237 " storeIC(poly, 'x');"
5238 " return proto;"
5239 " })();");
5240}
5241
5242
5243TEST(WeakMapInMonomorphicKeyedStoreIC) {
5244 CheckWeakness("function keyedStoreIC(obj, field, value) {"
5245 " obj[field] = value;"
5246 "}"
5247 " (function() {"
5248 " var proto = {'name' : 'weak'};"
5249 " var obj = Object.create(proto);"
5250 " keyedStoreIC(obj, 'x');"
5251 " keyedStoreIC(obj, 'x');"
5252 " keyedStoreIC(obj, 'x');"
5253 " return proto;"
5254 " })();");
5255}
5256
5257
5258TEST(WeakMapInPolymorphicKeyedStoreIC) {
5259 CheckWeakness(
5260 "function keyedStoreIC(obj, field, value) {"
5261 " obj[field] = value;"
5262 "}"
5263 " (function() {"
5264 " var proto = {'name' : 'weak'};"
5265 " var obj = Object.create(proto);"
5266 " keyedStoreIC(obj, 'x');"
5267 " keyedStoreIC(obj, 'x');"
5268 " keyedStoreIC(obj, 'x');"
5269 " var poly = Object.create(proto);"
5270 " poly.x = true;"
5271 " keyedStoreIC(poly, 'x');"
5272 " return proto;"
5273 " })();");
5274}
5275
5276
5277TEST(WeakMapInMonomorphicCompareNilIC) {
5278 CheckWeakness("function compareNilIC(obj) {"
5279 " return obj == null;"
5280 "}"
5281 " (function() {"
5282 " var proto = {'name' : 'weak'};"
5283 " var obj = Object.create(proto);"
5284 " compareNilIC(obj);"
5285 " compareNilIC(obj);"
5286 " compareNilIC(obj);"
5287 " return proto;"
5288 " })();");
5289}
5290
5291
5292Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5293 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5294 Handle<Object> obj =
5295 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5296 return Handle<JSFunction>::cast(obj);
5297}
5298
5299
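// Checks the IC state: for vector-based ICs (load, keyed-load, call) the
// state is read from the type feedback vector; for all other kinds it is
// read from the first matching IC stub found in the code object.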
5300void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5301 int slot_index, InlineCacheState state) {
5302 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5303 kind == Code::CALL_IC) {
5304 TypeFeedbackVector* vector = shared->feedback_vector();
5305 FeedbackVectorSlot slot(slot_index);
5306 if (kind == Code::LOAD_IC) {
5307 LoadICNexus nexus(vector, slot);
5308 CHECK_EQ(nexus.StateFromFeedback(), state);
5309 } else if (kind == Code::KEYED_LOAD_IC) {
5310 KeyedLoadICNexus nexus(vector, slot);
5311 CHECK_EQ(nexus.StateFromFeedback(), state);
5312 } else if (kind == Code::CALL_IC) {
5313 CallICNexus nexus(vector, slot);
5314 CHECK_EQ(nexus.StateFromFeedback(), state);
5315 }
5316 } else {
5317 Code* ic = FindFirstIC(code, kind);
5318 CHECK(ic->is_inline_cache_stub());
5319 CHECK(ic->ic_state() == state);
5320 }
5321}
5322
5323
5324TEST(MonomorphicStaysMonomorphicAfterGC) {
5325 if (FLAG_always_opt) return;
5326 CcTest::InitializeVM();
5327 Isolate* isolate = CcTest::i_isolate();
5328 Heap* heap = isolate->heap();
5329 v8::HandleScope scope(CcTest::isolate());
5330 CompileRun(
5331 "function loadIC(obj) {"
5332 " return obj.name;"
5333 "}"
5334 "function testIC() {"
5335 " var proto = {'name' : 'weak'};"
5336 " var obj = Object.create(proto);"
5337 " loadIC(obj);"
5338 " loadIC(obj);"
5339 " loadIC(obj);"
5340 " return proto;"
5341 "};");
5342 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5343 {
5344 v8::HandleScope scope(CcTest::isolate());
5345 CompileRun("(testIC())");
5346 }
5347 heap->CollectAllGarbage();
5348 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5349 {
5350 v8::HandleScope scope(CcTest::isolate());
5351 CompileRun("(testIC())");
5352 }
5353 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5354}
5355
5356
5357TEST(PolymorphicStaysPolymorphicAfterGC) {
5358 if (FLAG_always_opt) return;
5359 CcTest::InitializeVM();
5360 Isolate* isolate = CcTest::i_isolate();
5361 Heap* heap = isolate->heap();
5362 v8::HandleScope scope(CcTest::isolate());
5363 CompileRun(
5364 "function loadIC(obj) {"
5365 " return obj.name;"
5366 "}"
5367 "function testIC() {"
5368 " var proto = {'name' : 'weak'};"
5369 " var obj = Object.create(proto);"
5370 " loadIC(obj);"
5371 " loadIC(obj);"
5372 " loadIC(obj);"
5373 " var poly = Object.create(proto);"
5374 " poly.x = true;"
5375 " loadIC(poly);"
5376 " return proto;"
5377 "};");
5378 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5379 {
5380 v8::HandleScope scope(CcTest::isolate());
5381 CompileRun("(testIC())");
5382 }
5383 heap->CollectAllGarbage();
5384 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5385 {
5386 v8::HandleScope scope(CcTest::isolate());
5387 CompileRun("(testIC())");
5388 }
5389 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5390}
5391
5392
5393TEST(WeakCell) {
5394 CcTest::InitializeVM();
5395 Isolate* isolate = CcTest::i_isolate();
5396 v8::internal::Heap* heap = CcTest::heap();
5397 v8::internal::Factory* factory = isolate->factory();
5398
5399 HandleScope outer_scope(isolate);
5400 Handle<WeakCell> weak_cell1;
5401 {
5402 HandleScope inner_scope(isolate);
5403 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5404 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5405 }
5406
5407 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5408 Handle<WeakCell> weak_cell2;
5409 {
5410 HandleScope inner_scope(isolate);
5411 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5412 }
5413 CHECK(weak_cell1->value()->IsFixedArray());
5414 CHECK_EQ(*survivor, weak_cell2->value());
5415 heap->CollectGarbage(NEW_SPACE);
5416 CHECK(weak_cell1->value()->IsFixedArray());
5417 CHECK_EQ(*survivor, weak_cell2->value());
5418 heap->CollectGarbage(NEW_SPACE);
5419 CHECK(weak_cell1->value()->IsFixedArray());
5420 CHECK_EQ(*survivor, weak_cell2->value());
5421 heap->CollectAllAvailableGarbage();
5422 CHECK(weak_cell1->cleared());
5423 CHECK_EQ(*survivor, weak_cell2->value());
5424}
5425
5426
5427TEST(WeakCellsWithIncrementalMarking) {
5428 CcTest::InitializeVM();
5429 Isolate* isolate = CcTest::i_isolate();
5430 v8::internal::Heap* heap = CcTest::heap();
5431 v8::internal::Factory* factory = isolate->factory();
5432
5433 const int N = 16;
5434 HandleScope outer_scope(isolate);
5435 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5436 Handle<WeakCell> weak_cells[N];
5437
5438 for (int i = 0; i < N; i++) {
5439 HandleScope inner_scope(isolate);
5440 Handle<HeapObject> value =
5441 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5442 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5443 CHECK(weak_cell->value()->IsFixedArray());
5444 IncrementalMarking* marking = heap->incremental_marking();
5445 if (marking->IsStopped()) {
5446 heap->StartIncrementalMarking();
5447 }
5448 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5449 heap->CollectGarbage(NEW_SPACE);
5450 CHECK(weak_cell->value()->IsFixedArray());
5451 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5452 }
5453 // Call CollectAllGarbage twice to make sure that we also clear
5454 // weak cells that were allocated on black pages.
5455 heap->CollectAllGarbage();
5456 heap->CollectAllGarbage();
5457 CHECK_EQ(*survivor, weak_cells[0]->value());
5458 for (int i = 1; i < N; i++) {
5459 CHECK(weak_cells[i]->cleared());
5460 }
5461}
5462
5463
5464#ifdef DEBUG
5465TEST(AddInstructionChangesNewSpacePromotion) {
5466 i::FLAG_allow_natives_syntax = true;
5467 i::FLAG_expose_gc = true;
5468 i::FLAG_stress_compaction = true;
5469 i::FLAG_gc_interval = 1000;
5470 CcTest::InitializeVM();
5471 if (!i::FLAG_allocation_site_pretenuring) return;
5472 v8::HandleScope scope(CcTest::isolate());
5473 Isolate* isolate = CcTest::i_isolate();
5474 Heap* heap = isolate->heap();
5475 LocalContext env;
5476 CompileRun(
5477 "function add(a, b) {"
5478 " return a + b;"
5479 "}"
5480 "add(1, 2);"
5481 "add(\"a\", \"b\");"
5482 "var oldSpaceObject;"
5483 "gc();"
5484 "function crash(x) {"
5485 " var object = {a: null, b: null};"
5486 " var result = add(1.5, x | 0);"
5487 " object.a = result;"
5488 " oldSpaceObject = object;"
5489 " return object;"
5490 "}"
5491 "crash(1);"
5492 "crash(1);"
5493 "%OptimizeFunctionOnNextCall(crash);"
5494 "crash(1);");
5495
5496 v8::Local<v8::Object> global = CcTest::global();
5497 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
5498 global->Get(env.local(), v8_str("crash")).ToLocalChecked());
5499 v8::Local<v8::Value> args1[] = {v8_num(1)};
5500 heap->DisableInlineAllocation();
5501 heap->set_allocation_timeout(1);
5502 g->Call(env.local(), global, 1, args1).ToLocalChecked();
5503 heap->CollectAllGarbage();
5504}
5505
5506
5507void OnFatalErrorExpectOOM(const char* location, const char* message) {
5508 // Exit with 0 if the location matches our expectation.
5509 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
5510}
5511
5512
5513TEST(CEntryStubOOM) {
5514 i::FLAG_allow_natives_syntax = true;
5515 CcTest::InitializeVM();
5516 v8::HandleScope scope(CcTest::isolate());
5517 CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);
5518
5519 v8::Local<v8::Value> result = CompileRun(
5520 "%SetFlags('--gc-interval=1');"
5521 "var a = [];"
5522 "a.__proto__ = [];"
5523 "a.unshift(1)");
5524
5525 CHECK(result->IsNumber());
5526}
5527
5528#endif // DEBUG
5529
5530
5531static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5532
5533
5534static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5535 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
5536}
5537
5538
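// Regression test for chromium:538257: with a nearly full old space whose
// pages are all evacuation candidates, compaction must abort cleanly
// instead of running out of memory.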
5539UNINITIALIZED_TEST(Regress538257) {
5540 i::FLAG_manual_evacuation_candidates_selection = true;
5541 v8::Isolate::CreateParams create_params;
5542 // Set heap limits.
5543 create_params.constraints.set_max_semi_space_size(1 * Page::kPageSize / MB);
5544 create_params.constraints.set_max_old_space_size(6 * Page::kPageSize / MB);
5545 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5546 v8::Isolate* isolate = v8::Isolate::New(create_params);
5547 isolate->Enter();
5548 {
5549 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5550 HandleScope handle_scope(i_isolate);
5551 PagedSpace* old_space = i_isolate->heap()->old_space();
5552 const int kMaxObjects = 10000;
5553 const int kFixedArrayLen = 512;
5554 Handle<FixedArray> objects[kMaxObjects];
5555 for (int i = 0; (i < kMaxObjects) && old_space->CanExpand(Page::kPageSize);
5556 i++) {
5557 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
5558 Page::FromAddress(objects[i]->address())
5559 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
5560 }
5561 SimulateFullSpace(old_space);
5562 i_isolate->heap()->CollectGarbage(OLD_SPACE);
5563 // If we get this far, we've successfully aborted compaction. Any further
5564 // allocations might trigger OOM.
5565 }
5566 isolate->Exit();
5567 isolate->Dispose();
5568}
5569
5570
5571TEST(Regress357137) {
5572 CcTest::InitializeVM();
5573 v8::Isolate* isolate = CcTest::isolate();
5574 v8::HandleScope hscope(isolate);
5575 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5576 global->Set(
5577 v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
5578 .ToLocalChecked(),
5579 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5580 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5581 CHECK(!context.IsEmpty());
5582 v8::Context::Scope cscope(context);
5583
5584 v8::Local<v8::Value> result = CompileRun(
5585 "var locals = '';"
5586 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5587 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5588 "interrupt();" // This triggers a fake stack overflow in f.
5589 "f()()");
5590 CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
5591}
5592
5593
5594TEST(Regress507979) {
5595 const int kFixedArrayLen = 10;
5596 CcTest::InitializeVM();
5597 Isolate* isolate = CcTest::i_isolate();
5598 Heap* heap = isolate->heap();
5599 HandleScope handle_scope(isolate);
5600
5601 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5602 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5603 CHECK(heap->InNewSpace(*o1));
5604 CHECK(heap->InNewSpace(*o2));
5605
5606 HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);
5607
5608 // Replace part of an object placed before a live object with a filler. This
5609 // way the filler object shares its mark bits with the following live object.
5610 o1->Shrink(kFixedArrayLen - 1);
5611
5612 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
5613 // Let's not optimize the loop away.
5614 CHECK(obj->address() != nullptr);
5615 }
5616}
5617
5618
5619 UNINITIALIZED_TEST(PromotionQueue) {
5620 i::FLAG_expose_gc = true;
5621 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
5622 i::FLAG_min_semi_space_size = i::FLAG_max_semi_space_size;
5623 v8::Isolate::CreateParams create_params;
5624 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5625 v8::Isolate* isolate = v8::Isolate::New(create_params);
5626 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5627 {
5628 v8::Isolate::Scope isolate_scope(isolate);
5629 v8::HandleScope handle_scope(isolate);
5630 v8::Context::New(isolate)->Enter();
5631 Heap* heap = i_isolate->heap();
5632 NewSpace* new_space = heap->new_space();
5633
5634 // In this test we try to overwrite the promotion queue, which sits at the
5635 // end of to-space. To make that possible, we need at least two
5636 // semi-space pages and take advantage of fragmentation.
5637 // (1) Use a semi-space consisting of two pages.
5638 // (2) Create a few small long-living objects and call the scavenger to
5639 // move them to the other semi-space.
5640 // (3) Create a huge object, i.e., the remainder of the first semi-space
5641 // page, and create another huge object of the maximum allocatable
5642 // size of the second semi-space page.
5643 // (4) Call the scavenger again.
5644 // What will happen is: the scavenger will promote the objects created in
5645 // (2) and will create promotion queue entries at the end of the second
5646 // semi-space page during the next scavenge when it promotes the objects to
5647 // the old generation. The first allocation of (3) will fill up the first
5648 // semi-space page. The second allocation in (3) will not fit into the
5649 // first semi-space page, but it will overwrite the promotion queue, which
5650 // is in the second semi-space page. If the right guards are in place, the
5651 // promotion queue will be evacuated in that case.
5652
5653
5654 CHECK(new_space->IsAtMaximumCapacity());
5655 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5656
5657 // Call the scavenger two times to get an empty new space
5658 heap->CollectGarbage(NEW_SPACE);
5659 heap->CollectGarbage(NEW_SPACE);
5660
5661 // First create a few objects which will survive a scavenge, and will get
5662 // promoted to the old generation later on. These objects will create
5663 // promotion queue entries at the end of the second semi-space page.
5664 const int number_handles = 12;
5665 Handle<FixedArray> handles[number_handles];
5666 for (int i = 0; i < number_handles; i++) {
5667 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5668 }
5669
5670 heap->CollectGarbage(NEW_SPACE);
5671 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5672
5673 // Fill-up the first semi-space page.
5674 FillUpOnePage(new_space);
5675
5676 // Create a small object to initialize the bump pointer on the second
5677 // semi-space page.
5678 Handle<FixedArray> small =
5679 i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5680 CHECK(heap->InNewSpace(*small));
5681
5682 // Fill up the second semi-space page.
5683 FillUpOnePage(new_space);
5684
5685 // This scavenge will corrupt memory if the promotion queue is not
5686 // evacuated.
5687 heap->CollectGarbage(NEW_SPACE);
5688 }
5689 isolate->Dispose();
5690}
5691
5692
5693TEST(Regress388880) {
5694 i::FLAG_expose_gc = true;
5695 CcTest::InitializeVM();
5696 v8::HandleScope scope(CcTest::isolate());
5697 Isolate* isolate = CcTest::i_isolate();
5698 Factory* factory = isolate->factory();
5699 Heap* heap = isolate->heap();
5700
5701 Handle<Map> map1 = Map::Create(isolate, 1);
5702 Handle<String> name = factory->NewStringFromStaticChars("foo");
5703 name = factory->InternalizeString(name);
5704 Handle<Map> map2 =
5705 Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE,
5706 Representation::Tagged(), OMIT_TRANSITION)
5707 .ToHandleChecked();
5708
5709 int desired_offset = Page::kPageSize - map1->instance_size();
5710
5711 // Allocate padding objects in old space so that the object allocated
5712 // afterwards ends exactly at the end of the page.
5713 SimulateFullSpace(heap->old_space());
5714 int padding_size = desired_offset - Page::kObjectStartOffset;
5715 CreatePadding(heap, padding_size, TENURED);
5716
5717 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5718 o->set_properties(*factory->empty_fixed_array());
5719
5720 // Ensure that the object was allocated where we need it.
5721 Page* page = Page::FromAddress(o->address());
5722 CHECK_EQ(desired_offset, page->Offset(o->address()));
5723
5724 // Now we have an object right at the end of the page.
5725
5726 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5727 // that would otherwise cause a crash.
5728 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5729 marking->Stop();
5730 CcTest::heap()->StartIncrementalMarking();
5731 CHECK(marking->IsMarking());
5732
5733 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5734 // when it calls heap->AdjustLiveBytes(...).
5735 JSObject::MigrateToMap(o, map2);
5736}
5737
5738
5739TEST(Regress3631) {
5740 i::FLAG_expose_gc = true;
5741 CcTest::InitializeVM();
5742 v8::HandleScope scope(CcTest::isolate());
5743 Isolate* isolate = CcTest::i_isolate();
5744 Heap* heap = isolate->heap();
5745 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5746 v8::Local<v8::Value> result = CompileRun(
5747 "var weak_map = new WeakMap();"
5748 "var future_keys = [];"
5749 "for (var i = 0; i < 50; i++) {"
5750 " var key = {'k' : i + 0.1};"
5751 " weak_map.set(key, 1);"
5752 " future_keys.push({'x' : i + 0.2});"
5753 "}"
5754 "weak_map");
5755 if (marking->IsStopped()) {
5756 CcTest::heap()->StartIncrementalMarking();
5757 }
5758 // Incrementally mark the backing store.
5759 Handle<JSReceiver> obj =
5760 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5761 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5762 while (!Marking::IsBlack(
5763 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5764 !marking->IsStopped()) {
5765 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5766 }
5767 // Stash the backing store in a handle.
5768 Handle<Object> save(weak_map->table(), isolate);
5769 // The following line will update the backing store.
5770 CompileRun(
5771 "for (var i = 0; i < 50; i++) {"
5772 " weak_map.set(future_keys[i], i);"
5773 "}");
5774 heap->incremental_marking()->set_should_hurry(true);
5775 heap->CollectGarbage(OLD_SPACE);
5776}
5777
5778
5779TEST(Regress442710) {
5780 CcTest::InitializeVM();
5781 Isolate* isolate = CcTest::i_isolate();
5782 Heap* heap = isolate->heap();
5783 Factory* factory = isolate->factory();
5784
5785 HandleScope sc(isolate);
5786 Handle<JSGlobalObject> global(
5787 CcTest::i_isolate()->context()->global_object());
5788 Handle<JSArray> array = factory->NewJSArray(2);
5789
5790 Handle<String> name = factory->InternalizeUtf8String("testArray");
5791 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5792 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5793 heap->CollectGarbage(OLD_SPACE);
5794}
5795
5796
5797HEAP_TEST(NumberStringCacheSize) {
5798 // Test that the number-string cache has not been resized in the snapshot.
5799 CcTest::InitializeVM();
5800 Isolate* isolate = CcTest::i_isolate();
5801 if (!isolate->snapshot_available()) return;
5802 Heap* heap = isolate->heap();
5803 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5804 heap->number_string_cache()->length());
5805}
5806
5807
5808TEST(Regress3877) {
5809 CcTest::InitializeVM();
5810 Isolate* isolate = CcTest::i_isolate();
5811 Heap* heap = isolate->heap();
5812 Factory* factory = isolate->factory();
5813 HandleScope scope(isolate);
5814 CompileRun("function cls() { this.x = 10; }");
5815 Handle<WeakCell> weak_prototype;
5816 {
5817 HandleScope inner_scope(isolate);
5818 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5819 Handle<JSReceiver> proto =
5820 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5821 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5822 }
5823 CHECK(!weak_prototype->cleared());
5824 CompileRun(
5825 "var a = { };"
5826 "a.x = new cls();"
5827 "cls.prototype = null;");
5828 for (int i = 0; i < 4; i++) {
5829 heap->CollectAllGarbage();
5830 }
5831 // The map of a.x keeps the prototype alive.
5832 CHECK(!weak_prototype->cleared());
5833 // Change the map of a.x and make the previous map garbage collectable.
5834 CompileRun("a.x.__proto__ = {};");
5835 for (int i = 0; i < 4; i++) {
5836 heap->CollectAllGarbage();
5837 }
5838 CHECK(weak_prototype->cleared());
5839}
5840
5841
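// Creates a fresh map, points its prototype at a new object, registers the
// map as retained, and returns a weak cell for it.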
5842Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5843 HandleScope inner_scope(isolate);
5844 Handle<Map> map = Map::Create(isolate, 1);
5845 v8::Local<v8::Value> result =
5846 CompileRun("(function () { return {x : 10}; })();");
5847 Handle<JSReceiver> proto =
5848 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5849 Map::SetPrototype(map, proto);
5850 heap->AddRetainedMap(map);
5851 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
5852}
5853
5854
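// Verifies that a retained map survives exactly |n| GCs and is cleared by
// the following one.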
5855void CheckMapRetainingFor(int n) {
5856 FLAG_retain_maps_for_n_gc = n;
5857 Isolate* isolate = CcTest::i_isolate();
5858 Heap* heap = isolate->heap();
5859 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5860 CHECK(!weak_cell->cleared());
5861 for (int i = 0; i < n; i++) {
5862 SimulateIncrementalMarking(heap);
5863 heap->CollectGarbage(OLD_SPACE);
5864 }
5865 CHECK(!weak_cell->cleared());
5866 SimulateIncrementalMarking(heap);
5867 heap->CollectGarbage(OLD_SPACE);
5868 CHECK(weak_cell->cleared());
5869}
5870
5871
5872TEST(MapRetaining) {
5873 CcTest::InitializeVM();
5874 v8::HandleScope scope(CcTest::isolate());
5875 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5876 CheckMapRetainingFor(0);
5877 CheckMapRetainingFor(1);
5878 CheckMapRetainingFor(7);
5879}
5880
5881
5882TEST(RegressArrayListGC) {
5883 FLAG_retain_maps_for_n_gc = 1;
5884 FLAG_incremental_marking = 0;
5885 FLAG_gc_global = true;
5886 CcTest::InitializeVM();
5887 v8::HandleScope scope(CcTest::isolate());
5888 Isolate* isolate = CcTest::i_isolate();
5889 Heap* heap = isolate->heap();
5890 AddRetainedMap(isolate, heap);
5891 Handle<Map> map = Map::Create(isolate, 1);
5892 heap->CollectGarbage(OLD_SPACE);
5893 // Force GC in old space on next addition of retained map.
5894 Map::WeakCellForMap(map);
5895 SimulateFullSpace(CcTest::heap()->new_space());
5896 for (int i = 0; i < 10; i++) {
5897 heap->AddRetainedMap(map);
5898 }
5899 heap->CollectGarbage(OLD_SPACE);
5900}
5901
5902
5903#ifdef DEBUG
5904TEST(PathTracer) {
5905 CcTest::InitializeVM();
5906 v8::HandleScope scope(CcTest::isolate());
5907
5908 v8::Local<v8::Value> result = CompileRun("'abc'");
5909 Handle<Object> o = v8::Utils::OpenHandle(*result);
5910 CcTest::i_isolate()->heap()->TracePathToObject(*o);
5911}
5912#endif // DEBUG
5913
5914
5915TEST(WritableVsImmortalRoots) {
5916 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5917 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5918 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5919 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5920 // A root value can be writable, immortal, or neither, but not both.
5921 CHECK(!immortal || !writable);
5922 }
5923}
5924
5925
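// Allocates a fixed typed array, right-trims it, and checks that a
// free-space filler follows the shrunken array without smashing its header.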
5926static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
5927 int initial_length,
5928 int elements_to_trim) {
5929 v8::HandleScope scope(CcTest::isolate());
5930 Isolate* isolate = CcTest::i_isolate();
5931 Factory* factory = isolate->factory();
5932 Heap* heap = isolate->heap();
5933
5934 Handle<FixedTypedArrayBase> array =
5935 factory->NewFixedTypedArray(initial_length, type, true);
5936 int old_size = array->size();
5937 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
5938 elements_to_trim);
5939
5940 // Check that free space filler is at the right place and did not smash the
5941 // array header.
5942 CHECK(array->IsFixedArrayBase());
5943 CHECK_EQ(initial_length - elements_to_trim, array->length());
5944 int new_size = array->size();
5945 if (new_size != old_size) {
5946 // Free space filler should be created in this case.
5947 Address next_obj_address = array->address() + array->size();
5948 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
5949 }
5950 heap->CollectAllAvailableGarbage();
5951}
5952
5953
5954TEST(Regress472513) {
5955 CcTest::InitializeVM();
5956 v8::HandleScope scope(CcTest::isolate());
5957
5958 // The combination of type/initial_length/elements_to_trim triggered
5959 // typed array header smashing with free space filler (crbug/472513).
5960
5961 // 64-bit cases.
5962 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
5963 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
5964 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
5965 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
5966 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
5967 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
5968
5969 // 32-bit cases.
5970 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
5971 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
5972 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
5973 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
5974 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
5975}
5976
5977
5978TEST(WeakFixedArray) {
5979 CcTest::InitializeVM();
5980 v8::HandleScope scope(CcTest::isolate());
5981
5982 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
5983 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
5984 array->Remove(number);
5985 array->Compact<WeakFixedArray::NullCallback>();
5986 WeakFixedArray::Add(array, number);
5987}
5988
5989
5990TEST(PreprocessStackTrace) {
5991 // Do not automatically trigger early GC.
5992 FLAG_gc_interval = -1;
5993 CcTest::InitializeVM();
5994 v8::HandleScope scope(CcTest::isolate());
5995 v8::TryCatch try_catch(CcTest::isolate());
5996 CompileRun("throw new Error();");
5997 CHECK(try_catch.HasCaught());
5998 Isolate* isolate = CcTest::i_isolate();
5999 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
6000 Handle<Name> key = isolate->factory()->stack_trace_symbol();
6001 Handle<Object> stack_trace =
6002 Object::GetProperty(exception, key).ToHandleChecked();
6003 Handle<Object> code =
6004 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6005 CHECK(code->IsCode());
6006
6007 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
6008
6009 Handle<Object> pos =
6010 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6011 CHECK(pos->IsSmi());
6012
6013 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
6014 int array_length = Smi::cast(stack_trace_array->length())->value();
6015 for (int i = 0; i < array_length; i++) {
6016 Handle<Object> element =
6017 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
6018 CHECK(!element->IsCode());
6019 }
6020}
6021
6022
6023static bool utils_has_been_collected = false;
6024
6025static void UtilsHasBeenCollected(
6026 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
6027 utils_has_been_collected = true;
6028 data.GetParameter()->Reset();
6029}
6030
6031
6032TEST(BootstrappingExports) {
6033 // Expose utils object and delete it to observe that it is indeed
6034 // being garbage-collected.
6035 FLAG_expose_natives_as = "utils";
6036 CcTest::InitializeVM();
6037 v8::Isolate* isolate = CcTest::isolate();
6038 LocalContext env;
6039
6040 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
6041
6042 utils_has_been_collected = false;
6043
6044 v8::Persistent<v8::Object> utils;
6045
6046 {
6047 v8::HandleScope scope(isolate);
6048 v8::Local<v8::String> name = v8_str("utils");
6049 utils.Reset(isolate, CcTest::global()
6050 ->Get(env.local(), name)
6051 .ToLocalChecked()
6052 ->ToObject(env.local())
6053 .ToLocalChecked());
6054 CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
6055 }
6056
6057 utils.SetWeak(&utils, UtilsHasBeenCollected,
6058 v8::WeakCallbackType::kParameter);
6059
6060 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
6061
6062 CHECK(utils_has_been_collected);
6063}
6064
6065
6066TEST(Regress1878) {
6067 FLAG_allow_natives_syntax = true;
6068 CcTest::InitializeVM();
6069 v8::Isolate* isolate = CcTest::isolate();
6070 v8::HandleScope scope(isolate);
6071 v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
6072 CcTest::i_isolate()->internal_array_function());
6073 LocalContext env;
6074 CHECK(CcTest::global()
6075 ->Set(env.local(), v8_str("InternalArray"), constructor)
6076 .FromJust());
6077
6078 v8::TryCatch try_catch(isolate);
6079
6080 CompileRun(
6081 "var a = Array();"
6082 "for (var i = 0; i < 1000; i++) {"
6083 " var ai = new InternalArray(10000);"
6084 " if (%HaveSameMap(ai, a)) throw Error();"
6085 " if (!%HasFastObjectElements(ai)) throw Error();"
6086 "}"
6087 "for (var i = 0; i < 1000; i++) {"
6088 " var ai = new InternalArray(10000);"
6089 " if (%HaveSameMap(ai, a)) throw Error();"
6090 " if (!%HasFastObjectElements(ai)) throw Error();"
6091 "}");
6092
6093 CHECK(!try_catch.HasCaught());
6094}
6095
6096
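// Allocates a fixed array that occupies exactly |bytes| bytes in the given
// space.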
6097void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6098 CHECK(bytes >= FixedArray::kHeaderSize);
6099 CHECK(bytes % kPointerSize == 0);
6100 Factory* factory = isolate->factory();
6101 HandleScope scope(isolate);
6102 AlwaysAllocateScope always_allocate(isolate);
6103 int elements =
6104 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6105 Handle<FixedArray> array = factory->NewFixedArray(
6106 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6107 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6108 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6109}
6110
6111
6112TEST(NewSpaceAllocationCounter) {
6113 CcTest::InitializeVM();
6114 v8::HandleScope scope(CcTest::isolate());
6115 Isolate* isolate = CcTest::i_isolate();
6116 Heap* heap = isolate->heap();
6117 size_t counter1 = heap->NewSpaceAllocationCounter();
6118 heap->CollectGarbage(NEW_SPACE);
6119 const size_t kSize = 1024;
6120 AllocateInSpace(isolate, kSize, NEW_SPACE);
6121 size_t counter2 = heap->NewSpaceAllocationCounter();
6122 CHECK_EQ(kSize, counter2 - counter1);
6123 heap->CollectGarbage(NEW_SPACE);
6124 size_t counter3 = heap->NewSpaceAllocationCounter();
6125 CHECK_EQ(0U, counter3 - counter2);
6126 // Test counter overflow.
6127 size_t max_counter = -1;
6128 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6129 size_t start = heap->NewSpaceAllocationCounter();
6130 for (int i = 0; i < 20; i++) {
6131 AllocateInSpace(isolate, kSize, NEW_SPACE);
6132 size_t counter = heap->NewSpaceAllocationCounter();
6133 CHECK_EQ(kSize, counter - start);
6134 start = counter;
6135 }
6136}
6137
6138
6139TEST(OldSpaceAllocationCounter) {
6140 CcTest::InitializeVM();
6141 v8::HandleScope scope(CcTest::isolate());
6142 Isolate* isolate = CcTest::i_isolate();
6143 Heap* heap = isolate->heap();
6144 size_t counter1 = heap->OldGenerationAllocationCounter();
6145 heap->CollectGarbage(NEW_SPACE);
6146 heap->CollectGarbage(NEW_SPACE);
6147 const size_t kSize = 1024;
6148 AllocateInSpace(isolate, kSize, OLD_SPACE);
6149 size_t counter2 = heap->OldGenerationAllocationCounter();
6150 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6151 CHECK_LE(kSize, counter2 - counter1);
6152 heap->CollectGarbage(NEW_SPACE);
6153 size_t counter3 = heap->OldGenerationAllocationCounter();
6154 CHECK_EQ(0u, counter3 - counter2);
6155 AllocateInSpace(isolate, kSize, OLD_SPACE);
6156 heap->CollectGarbage(OLD_SPACE);
6157 size_t counter4 = heap->OldGenerationAllocationCounter();
6158 CHECK_LE(kSize, counter4 - counter3);
6159 // Test counter overflow.
6160 size_t max_counter = -1;
6161 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6162 size_t start = heap->OldGenerationAllocationCounter();
6163 for (int i = 0; i < 20; i++) {
6164 AllocateInSpace(isolate, kSize, OLD_SPACE);
6165 size_t counter = heap->OldGenerationAllocationCounter();
6166 CHECK_LE(kSize, counter - start);
6167 start = counter;
6168 }
6169}
6170
6171
6172TEST(NewSpaceAllocationThroughput) {
6173 CcTest::InitializeVM();
6174 v8::HandleScope scope(CcTest::isolate());
6175 Isolate* isolate = CcTest::i_isolate();
6176 Heap* heap = isolate->heap();
6177 GCTracer* tracer = heap->tracer();
6178 int time1 = 100;
6179 size_t counter1 = 1000;
6180 tracer->SampleAllocation(time1, counter1, 0);
6181 int time2 = 200;
6182 size_t counter2 = 2000;
6183 tracer->SampleAllocation(time2, counter2, 0);
6184 size_t throughput =
6185 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6186 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6187 int time3 = 1000;
6188 size_t counter3 = 30000;
6189 tracer->SampleAllocation(time3, counter3, 0);
6190 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6191 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6192}
6193
6194
6195TEST(NewSpaceAllocationThroughput2) {
6196 CcTest::InitializeVM();
6197 v8::HandleScope scope(CcTest::isolate());
6198 Isolate* isolate = CcTest::i_isolate();
6199 Heap* heap = isolate->heap();
6200 GCTracer* tracer = heap->tracer();
6201 int time1 = 100;
6202 size_t counter1 = 1000;
6203 tracer->SampleAllocation(time1, counter1, 0);
6204 int time2 = 200;
6205 size_t counter2 = 2000;
6206 tracer->SampleAllocation(time2, counter2, 0);
6207 size_t throughput =
6208 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6209 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6210 int time3 = 1000;
6211 size_t counter3 = 30000;
6212 tracer->SampleAllocation(time3, counter3, 0);
6213 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6214 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6215}
6216
6217
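// Called from generated JavaScript; asserts that no message object was left
// behind in the isolate's pending-message slot.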
6218static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
6219 Isolate* isolate = CcTest::i_isolate();
6220 Object* message =
6221 *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
6222 CHECK(message->IsTheHole());
6223}
6224
6225
6226TEST(MessageObjectLeak) {
6227 CcTest::InitializeVM();
6228 v8::Isolate* isolate = CcTest::isolate();
6229 v8::HandleScope scope(isolate);
6230 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6231 global->Set(
6232 v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
6233 .ToLocalChecked(),
6234 v8::FunctionTemplate::New(isolate, CheckLeak));
6235 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6236 v8::Context::Scope cscope(context);
6237
6238 const char* test =
6239 "try {"
6240 " throw 'message 1';"
6241 "} catch (e) {"
6242 "}"
6243 "check();"
6244 "L: try {"
6245 " throw 'message 2';"
6246 "} finally {"
6247 " break L;"
6248 "}"
6249 "check();";
6250 CompileRun(test);
6251
6252 const char* flag = "--turbo-filter=*";
6253 FlagList::SetFlagsFromString(flag, StrLength(flag));
6254 FLAG_always_opt = true;
6255
6256 CompileRun(test);
6257}
6258
6259
static void CheckEqualSharedFunctionInfos(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
  Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
  Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
  Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
  CHECK(fun1->shared() == fun2->shared());
}


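// Native callback that resets its argument to the lazy-compile stub, clears
// its bytecode, and triggers a full GC so the compiled code is collected.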
static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  fun->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ClearBytecodeArray();  // Bytecode is code too.
  isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
}


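// Checks that recompiling a function after its code has been flushed reuses
// the canonical SharedFunctionInfo: closures created before and after the
// flush must agree on their shared info, even when the inner function is
// wrapped in an immediately-invoked closure.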
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}


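// Old-generation counterpart of the new-space throughput tests: the samples
// feed the old-generation counter, and the reported rate (a double) is cast
// to size_t for comparison.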
TEST(OldGenerationAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, 0, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, 0, counter2);
  size_t throughput = static_cast<size_t>(
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, 0, counter3);
  throughput = static_cast<size_t>(
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}


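// Combined throughput: both counters advance in lockstep, so the reported
// value is twice the per-space rate, e.g. 2 * (2000 - 1000) / (200 - 100) =
// 20 bytes/ms after the first two samples.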
TEST(AllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, counter2);
  size_t throughput = static_cast<size_t>(
      tracer->AllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, counter3);
  throughput = static_cast<size_t>(
      tracer->AllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
}


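// Measures the native context and sanity-checks the result: it must contain
// at least 1000 objects and 50000 bytes, but no more than the whole heap.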
TEST(ContextMeasure) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;

  int size_upper_limit = 0;
  int count_upper_limit = 0;
  HeapIterator it(CcTest::heap());
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    size_upper_limit += obj->Size();
    count_upper_limit++;
  }

  ContextMeasure measure(*isolate->native_context());

  PrintF("Context size        : %d bytes\n", measure.Size());
  PrintF("Context object count: %d\n", measure.Count());

  CHECK_LE(1000, measure.Count());
  CHECK_LE(50000, measure.Size());

  CHECK_LE(measure.Count(), count_upper_limit);
  CHECK_LE(measure.Size(), size_upper_limit);
}


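// Script::Iterator must visit exactly the Script objects that a full heap
// iteration finds.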
TEST(ScriptIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  heap->CollectAllGarbage();

  int script_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (obj->IsScript()) script_count++;
    }
  }

  {
    Script::Iterator iterator(isolate);
    while (iterator.Next()) script_count--;
  }

  CHECK_EQ(0, script_count);
}


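// SharedFunctionInfo::Iterator must visit exactly the SharedFunctionInfo
// objects that a full heap iteration finds.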
TEST(SharedFunctionInfoIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  heap->CollectAllGarbage();
  heap->CollectAllGarbage();

  int sfi_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (!obj->IsSharedFunctionInfo()) continue;
      sfi_count++;
    }
  }

  {
    SharedFunctionInfo::Iterator iterator(isolate);
    while (iterator.Next()) sfi_count--;
  }

  CHECK_EQ(0, sfi_count);
}


template <typename T>
static UniqueId MakeUniqueId(const Persistent<T>& p) {
  return UniqueId(reinterpret_cast<uintptr_t>(*v8::Utils::OpenPersistent(p)));
}


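// Regression test: registering object groups and then starting and finalizing
// incremental marking while sweeper threads from the preceding GC may still
// be running used to crash.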
TEST(Regress519319) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  Heap* heap = CcTest::heap();
  LocalContext context;

  v8::Persistent<Value> parent;
  v8::Persistent<Value> child;

  parent.Reset(isolate, v8::Object::New(isolate));
  child.Reset(isolate, v8::Object::New(isolate));

  SimulateFullSpace(heap->old_space());
  heap->CollectGarbage(OLD_SPACE);
  {
    UniqueId id = MakeUniqueId(parent);
    isolate->SetObjectGroupId(parent, id);
    isolate->SetReferenceFromGroup(id, child);
  }
  // The CollectGarbage call above starts sweeper threads.
  // The crash will happen if the following two functions
  // are called before sweeping finishes.
  heap->StartIncrementalMarking();
  heap->FinalizeIncrementalMarkingIfComplete("test");
}


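// SampleAndGetJsCallsPerMs should report the number of API JS calls since the
// previous sample divided by the elapsed time: 3 calls in 1 ms -> 3, no calls
// in the next 1 ms -> 0, 4 calls in the next 2 ms -> 2.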
HEAP_TEST(TestMemoryReducerSampleJsCalls) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  MemoryReducer* memory_reducer = heap->memory_reducer_;
  memory_reducer->SampleAndGetJsCallsPerMs(0);
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
  CheckDoubleEquals(3, calls_per_ms);

  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
  CheckDoubleEquals(0, calls_per_ms);

  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
  CheckDoubleEquals(2, calls_per_ms);
}

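// Regression test: right-trims a page-sized tenured array while sweeping is
// still relevant (concurrent sweeping disabled), then exhausts old-space
// allocation from the free list over the trimmed area.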
HEAP_TEST(Regress587004) {
  FLAG_concurrent_sweeping = false;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = false;
#endif
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  const int N = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
                kPointerSize;
  Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
  CHECK(heap->old_space()->Contains(*array));
  Handle<Object> number = factory->NewHeapNumber(1.0);
  CHECK(heap->InNewSpace(*number));
  for (int i = 0; i < N; i++) {
    array->set(i, *number);
  }
  heap->CollectGarbage(OLD_SPACE);
  SimulateFullSpace(heap->old_space());
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  ByteArray* byte_array;
  const int M = 256;
  // Don't allow old space expansion. The test works without this flag too,
  // but becomes very slow.
  heap->set_force_oom(true);
  while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
  }
  // Re-enable old space expansion to avoid OOM crash.
  heap->set_force_oom(false);
  heap->CollectGarbage(NEW_SPACE);
}

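// Regression test: fills old space with fixed arrays pointing at a forced
// evacuation candidate, right-trims the arrays during incremental marking,
// and then compacts with allocation forced onto the free list, exercising the
// recorded-slot bookkeeping for trimmed objects.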
HEAP_TEST(Regress589413) {
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  FLAG_concurrent_sweeping = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap in clean state.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Fill the new space with byte arrays with elements looking like pointers.
  const int M = 256;
  ByteArray* byte_array;
  while (heap->AllocateByteArray(M).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
    // Add the array to the root set.
    handle(byte_array);
  }
  // Make sure the byte arrays will be promoted on the next GC.
  heap->CollectGarbage(NEW_SPACE);
  // This number is close to the large free-list category threshold.
  const int N = 0x3eee;
  {
    std::vector<FixedArray*> arrays;
    std::set<Page*> pages;
    FixedArray* array;
    // Fill all pages with fixed arrays.
    heap->set_force_oom(true);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array to the root set.
      handle(array);
    }
    // Expand and fill one complete page with fixed arrays.
    heap->set_force_oom(false);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array to the root set.
      handle(array);
      // Do not expand anymore.
      heap->set_force_oom(true);
    }
    // Expand and mark the new page as evacuation candidate.
    heap->set_force_oom(false);
    {
      AlwaysAllocateScope always_allocate(isolate);
      Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
      Page* ec_page = Page::FromAddress(ec_obj->address());
      ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
      // Make all arrays point to the evacuation candidate so that slots are
      // recorded for them.
      for (size_t j = 0; j < arrays.size(); j++) {
        array = arrays[j];
        for (int i = 0; i < N; i++) {
          array->set(i, *ec_obj);
        }
      }
    }
    SimulateIncrementalMarking(heap);
    for (size_t j = 0; j < arrays.size(); j++) {
      heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
    }
  }
  // Force allocation from the free list.
  heap->set_force_oom(true);
  heap->CollectGarbage(OLD_SPACE);
}

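// Regression test: shrinking a large fixed array must be reflected in
// SizeOfObjects right away, i.e. the heap grows by the trimmed array's size,
// not by its original allocation.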
TEST(Regress609761) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  intptr_t size_before = heap->SizeOfObjects();
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
  array->Shrink(1);
  intptr_t size_after = heap->SizeOfObjects();
  CHECK_EQ(size_after, size_before + array->Size());
}

}  // namespace internal
}  // namespace v8