// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28#include <stdlib.h>
29#include <utility>
30
31#include "src/compilation-cache.h"
32#include "src/context-measure.h"
33#include "src/deoptimizer.h"
34#include "src/execution.h"
35#include "src/factory.h"
Ben Murdoch097c5b22016-05-18 11:27:45 +010036#include "src/field-type.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000037#include "src/global-handles.h"
38#include "src/heap/gc-tracer.h"
39#include "src/heap/memory-reducer.h"
40#include "src/ic/ic.h"
41#include "src/macro-assembler.h"
42#include "src/regexp/jsregexp.h"
43#include "src/snapshot/snapshot.h"
44#include "test/cctest/cctest.h"
45#include "test/cctest/heap/heap-tester.h"
46#include "test/cctest/heap/utils-inl.h"
47#include "test/cctest/test-feedback-vector.h"
48
49
50namespace v8 {
51namespace internal {
52
53static void CheckMap(Map* map, int type, int instance_size) {
54 CHECK(map->IsHeapObject());
55#ifdef DEBUG
56 CHECK(CcTest::heap()->Contains(map));
57#endif
58 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
59 CHECK_EQ(type, map->instance_type());
60 CHECK_EQ(instance_size, map->instance_size());
61}
62
63
// Checks that the canonical maps installed in the heap describe the
// expected instance types and sizes (variable-sized objects report
// kVariableSizeSentinel instead of a fixed size).
TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  // All SIMD128 value types share one instance type but each has its own map.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}
76
77
78static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
79 CHECK(obj->IsOddball());
80 Handle<Object> handle(obj, isolate);
81 Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
82 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
83}
84
85
86static void CheckSmi(Isolate* isolate, int value, const char* string) {
87 Handle<Object> handle(Smi::FromInt(value), isolate);
88 Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
89 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
90}
91
92
93static void CheckNumber(Isolate* isolate, double value, const char* string) {
94 Handle<Object> number = isolate->factory()->NewNumber(value);
95 CHECK(number->IsNumber());
96 Handle<Object> print_string =
97 Object::ToString(isolate, number).ToHandleChecked();
98 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
99}
100
101
102static void CheckFindCodeObject(Isolate* isolate) {
103 // Test FindCodeObject
104#define __ assm.
105
106 Assembler assm(isolate, NULL, 0);
107
108 __ nop(); // supported on all architectures
109
110 CodeDesc desc;
111 assm.GetCode(&desc);
112 Handle<Code> code = isolate->factory()->NewCode(
113 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
114 CHECK(code->IsCode());
115
116 HeapObject* obj = HeapObject::cast(*code);
117 Address obj_addr = obj->address();
118
119 for (int i = 0; i < obj->Size(); i += kPointerSize) {
120 Object* found = isolate->FindCodeObject(obj_addr + i);
121 CHECK_EQ(*code, found);
122 }
123
124 Handle<Code> copy = isolate->factory()->NewCode(
125 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
126 HeapObject* obj_copy = HeapObject::cast(*copy);
127 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
128 obj_copy->Size() / 2);
129 CHECK(not_right != *code);
130}
131
132
133TEST(HandleNull) {
134 CcTest::InitializeVM();
135 Isolate* isolate = CcTest::i_isolate();
136 HandleScope outer_scope(isolate);
137 LocalContext context;
138 Handle<Object> n(static_cast<Object*>(nullptr), isolate);
139 CHECK(!n.is_null());
140}
141
142
// Exercises basic heap-object allocation: the Smi / HeapNumber representation
// boundary, string allocation, global-object properties, and the ToString
// behavior of oddballs, Smis and doubles.
TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  // A non-integral double must be boxed as a HeapNumber.
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  // Integral doubles inside the Smi range are stored as Smis.
  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  // Both endpoints of the Smi range still fit in a Smi.
  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  // One past kMaxValue overflows the Smi range and becomes a HeapNumber.
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  // The global object exposes "Object" as an own property.
  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}
224
225
// Verifies that |value| holds exactly |lane_values|, and that overwriting any
// single lane with |other_value| leaves all the other lanes untouched.
template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
                           LANE_TYPE other_value) {
  for (int i = 0; i < LANES; i++) {
    CHECK_EQ(lane_values[i], value->get_lane(i));
  }
  for (int i = 0; i < LANES; i++) {
    value->set_lane(i, other_value);  // Overwrite a single lane...
    for (int j = 0; j < LANES; j++) {
      // ...and expect only that lane to have changed.
      LANE_TYPE expected = (j == i) ? other_value : lane_values[j];
      CHECK_EQ(expected, value->get_lane(j));
    }
    value->set_lane(i, lane_values[i]);  // Restore the lane.
  }
  CHECK(value->BooleanValue());  // SIMD values are 'true'.
}
246
247
// Exercises allocation, lane get/set and (under OBJECT_PRINT) printing for
// every SIMD128 value type: Float32x4, Int/Uint32x4, Bool32x4, Int/Uint16x8,
// Bool16x8, Int/Uint8x16 and Bool8x16.
TEST(SimdObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  // Float32x4
  {
    float lanes[4] = {1, 2, 3, 4};
    float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
    float signaling_NaN = std::numeric_limits<float>::signaling_NaN();

    Handle<Float32x4> value = factory->NewFloat32x4(lanes);
    CHECK(value->IsFloat32x4());
    CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);

    // Check special lane values: negative zero must keep its sign bit, and
    // both quiet and signaling NaNs must read back as NaN.
    value->set_lane(1, -0.0);
    CHECK_EQ(-0.0f, value->get_lane(1));
    CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
    value->set_lane(2, quiet_NaN);
    CHECK(std::isnan(value->get_lane(2)));
    value->set_lane(3, signaling_NaN);
    CHECK(std::isnan(value->get_lane(3)));

#ifdef OBJECT_PRINT
    // Check value printing.
    {
      value = factory->NewFloat32x4(lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      CHECK_EQ("1, 2, 3, 4", os.str());
    }
    {
      float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
      value = factory->NewFloat32x4(special_lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      // Value printing doesn't preserve signed zeroes.
      CHECK_EQ("0, 0, NaN, NaN", os.str());
    }
#endif  // OBJECT_PRINT
  }
  // Int32x4
  {
    int32_t lanes[4] = {1, 2, 3, 4};

    Handle<Int32x4> value = factory->NewInt32x4(lanes);
    CHECK(value->IsInt32x4());
    CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint32x4
  {
    uint32_t lanes[4] = {1, 2, 3, 4};

    Handle<Uint32x4> value = factory->NewUint32x4(lanes);
    CHECK(value->IsUint32x4());
    CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool32x4
  {
    bool lanes[4] = {true, false, true, false};

    Handle<Bool32x4> value = factory->NewBool32x4(lanes);
    CHECK(value->IsBool32x4());
    CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool32x4Print(os);
    CHECK_EQ("true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int16x8
  {
    int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Int16x8> value = factory->NewInt16x8(lanes);
    CHECK(value->IsInt16x8());
    CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint16x8
  {
    uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Uint16x8> value = factory->NewUint16x8(lanes);
    CHECK(value->IsUint16x8());
    CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool16x8
  {
    bool lanes[8] = {true, false, true, false, true, false, true, false};

    Handle<Bool16x8> value = factory->NewBool16x8(lanes);
    CHECK(value->IsBool16x8());
    CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool16x8Print(os);
    CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int8x16
  {
    int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Int8x16> value = factory->NewInt8x16(lanes);
    CHECK(value->IsInt8x16());
    CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint8x16
  {
    uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Uint8x16> value = factory->NewUint8x16(lanes);
    CHECK(value->IsUint8x16());
    CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool8x16
  {
    bool lanes[16] = {true, false, true, false, true, false, true, false,
                      true, false, true, false, true, false, true, false};

    Handle<Bool8x16> value = factory->NewBool8x16(lanes);
    CHECK(value->IsBool8x16());
    CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool8x16Print(os);
    CHECK_EQ(
        "true, false, true, false, true, false, true, false, true, false, "
        "true, false, true, false, true, false",
        os.str());
#endif  // OBJECT_PRINT
  }
}
423
424
425TEST(Tagging) {
426 CcTest::InitializeVM();
427 int request = 24;
428 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
429 CHECK(Smi::FromInt(42)->IsSmi());
430 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
431 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
432}
433
434
// Verifies that scavenges keep objects reachable from the global object (and
// their properties) alive, while handles dropped at inner-scope exit are not
// needed for survival of globally rooted objects.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}
498
499
500static void VerifyStringAllocation(Isolate* isolate, const char* string) {
501 HandleScope scope(isolate);
502 Handle<String> s = isolate->factory()->NewStringFromUtf8(
503 CStrVector(string)).ToHandleChecked();
504 CHECK_EQ(StrLength(string), s->length());
505 for (int index = 0; index < s->length(); index++) {
506 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
507 }
508}
509
510
511TEST(String) {
512 CcTest::InitializeVM();
513 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
514
515 VerifyStringAllocation(isolate, "a");
516 VerifyStringAllocation(isolate, "ab");
517 VerifyStringAllocation(isolate, "abc");
518 VerifyStringAllocation(isolate, "abcd");
519 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
520}
521
522
523TEST(LocalHandles) {
524 CcTest::InitializeVM();
525 Isolate* isolate = CcTest::i_isolate();
526 Factory* factory = isolate->factory();
527
528 v8::HandleScope scope(CcTest::isolate());
529 const char* name = "Kasper the spunky";
530 Handle<String> string = factory->NewStringFromAsciiChecked(name);
531 CHECK_EQ(StrLength(name), string->length());
532}
533
534
// Verifies that strong global handles keep their targets alive across a
// scavenge even after the local handle scope that created the objects has
// been exited, and that two handles created for the same object keep
// referring to that same object.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    // Two global handles per object; the locals die with the scope.
    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  // Both handle pairs must still refer to the same (possibly moved) objects.
  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}
575
576
// Set by TestWeakGlobalHandleCallback when a weak callback fires for a handle
// registered with parameter id 1234; reset by each test before use.
static bool WeakPointerCleared = false;
578
579static void TestWeakGlobalHandleCallback(
580 const v8::WeakCallbackData<v8::Value, void>& data) {
581 std::pair<v8::Persistent<v8::Value>*, int>* p =
582 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
583 data.GetParameter());
584 if (p->second == 1234) WeakPointerCleared = true;
585 p->first->Reset();
586}
587
588
// A scavenge must treat weak global handles as strong roots: the weakly held
// object survives and the weak callback does not fire.
TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make only h2 weak; 1234 is the id the callback checks for.
  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}
630
631
// A full mark-compact GC must clear an otherwise-unreachable weak global
// handle (invoking its callback) while leaving strong handles intact.
TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  // Make only h2 weak; 1234 is the id the callback checks for.
  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage();

  CHECK((*h1)->IsString());

  // The weak handle's callback must have fired; the strong one survives.
  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}
676
677
// A weak global handle survives a scavenge (which treats it as strong) but is
// cleared — with its callback invoked — by a mark-compact collection.
TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  // 1234 is the id TestWeakGlobalHandleCallback checks for.
  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge does not recognize weak references.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  heap->CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}
712
713
714TEST(BytecodeArray) {
715 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
716 static const int kRawBytesSize = sizeof(kRawBytes);
717 static const int kFrameSize = 32;
718 static const int kParameterCount = 2;
719
720 i::FLAG_manual_evacuation_candidates_selection = true;
721 CcTest::InitializeVM();
722 Isolate* isolate = CcTest::i_isolate();
723 Heap* heap = isolate->heap();
724 Factory* factory = isolate->factory();
725 HandleScope scope(isolate);
726
727 SimulateFullSpace(heap->old_space());
728 Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
729 for (int i = 0; i < 5; i++) {
730 Handle<Object> number = factory->NewHeapNumber(i);
731 constant_pool->set(i, *number);
732 }
733
734 // Allocate and initialize BytecodeArray
735 Handle<BytecodeArray> array = factory->NewBytecodeArray(
736 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
737
738 CHECK(array->IsBytecodeArray());
739 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
740 CHECK_EQ(array->frame_size(), kFrameSize);
741 CHECK_EQ(array->parameter_count(), kParameterCount);
742 CHECK_EQ(array->constant_pool(), *constant_pool);
743 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
744 CHECK_GE(array->address() + array->BytecodeArraySize(),
745 array->GetFirstBytecodeAddress() + array->length());
746 for (int i = 0; i < kRawBytesSize; i++) {
747 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
748 CHECK_EQ(array->get(i), kRawBytes[i]);
749 }
750
751 FixedArray* old_constant_pool_address = *constant_pool;
752
753 // Perform a full garbage collection and force the constant pool to be on an
754 // evacuation candidate.
755 Page* evac_page = Page::FromAddress(constant_pool->address());
756 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
757 heap->CollectAllGarbage();
758
759 // BytecodeArray should survive.
760 CHECK_EQ(array->length(), kRawBytesSize);
761 CHECK_EQ(array->frame_size(), kFrameSize);
762 for (int i = 0; i < kRawBytesSize; i++) {
763 CHECK_EQ(array->get(i), kRawBytes[i]);
764 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
765 }
766
767 // Constant pool should have been migrated.
768 CHECK_EQ(array->constant_pool(), *constant_pool);
769 CHECK_NE(array->constant_pool(), old_constant_pool_address);
770}
771
772
// Fixed table of strings (JS keywords and reserved words) used to exercise
// the string-internalization table; terminated by a null sentinel.
static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};
835
836
837static void CheckInternalizedStrings(const char** strings) {
838 Isolate* isolate = CcTest::i_isolate();
839 Factory* factory = isolate->factory();
840 for (const char* string = *strings; *strings != 0; string = *strings++) {
841 HandleScope scope(isolate);
842 Handle<String> a =
843 isolate->factory()->InternalizeUtf8String(CStrVector(string));
844 // InternalizeUtf8String may return a failure if a GC is needed.
845 CHECK(a->IsInternalizedString());
846 Handle<String> b = factory->InternalizeUtf8String(string);
847 CHECK_EQ(*b, *a);
848 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
849 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
850 CHECK_EQ(*b, *a);
851 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
852 }
853}
854
855
856TEST(StringTable) {
857 CcTest::InitializeVM();
858
859 v8::HandleScope sc(CcTest::isolate());
860 CheckInternalizedStrings(not_so_random_string_table);
861 CheckInternalizedStrings(not_so_random_string_table);
862}
863
864
865TEST(FunctionAllocation) {
866 CcTest::InitializeVM();
867 Isolate* isolate = CcTest::i_isolate();
868 Factory* factory = isolate->factory();
869
870 v8::HandleScope sc(CcTest::isolate());
871 Handle<String> name = factory->InternalizeUtf8String("theFunction");
872 Handle<JSFunction> function = factory->NewFunction(name);
873
874 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
875 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
876
877 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
878 Handle<JSObject> obj = factory->NewJSObject(function);
879 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
880 CHECK_EQ(Smi::FromInt(23),
881 *Object::GetProperty(obj, prop_name).ToHandleChecked());
882 // Check that we can add properties to function objects.
883 JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
884 CHECK_EQ(Smi::FromInt(24),
885 *Object::GetProperty(function, prop_name).ToHandleChecked());
886}
887
888
// Exercises own-property add/delete cycles on a plain JS object, including
// deletion in both insertion order and reverse order, and checks that a
// regular string and its internalized form name the same property.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}
957
958
959TEST(JSObjectMaps) {
960 CcTest::InitializeVM();
961 Isolate* isolate = CcTest::i_isolate();
962 Factory* factory = isolate->factory();
963
964 v8::HandleScope sc(CcTest::isolate());
965 Handle<String> name = factory->InternalizeUtf8String("theFunction");
966 Handle<JSFunction> function = factory->NewFunction(name);
967
968 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
969 Handle<JSObject> obj = factory->NewJSObject(function);
970 Handle<Map> initial_map(function->initial_map());
971
972 // Set a propery
973 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
974 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
975 CHECK_EQ(Smi::FromInt(23),
976 *Object::GetProperty(obj, prop_name).ToHandleChecked());
977
978 // Check the map has changed
979 CHECK(*initial_map != obj->map());
980}
981
982
983TEST(JSArray) {
984 CcTest::InitializeVM();
985 Isolate* isolate = CcTest::i_isolate();
986 Factory* factory = isolate->factory();
987
988 v8::HandleScope sc(CcTest::isolate());
989 Handle<String> name = factory->InternalizeUtf8String("Array");
990 Handle<Object> fun_obj = Object::GetProperty(
991 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
992 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
993
994 // Allocate the object.
995 Handle<Object> element;
996 Handle<JSObject> object = factory->NewJSObject(function);
997 Handle<JSArray> array = Handle<JSArray>::cast(object);
998 // We just initialized the VM, no heap allocation failure yet.
999 JSArray::Initialize(array, 0);
1000
1001 // Set array length to 0.
1002 JSArray::SetLength(array, 0);
1003 CHECK_EQ(Smi::FromInt(0), array->length());
1004 // Must be in fast mode.
1005 CHECK(array->HasFastSmiOrObjectElements());
1006
1007 // array[length] = name.
1008 JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
1009 CHECK_EQ(Smi::FromInt(1), array->length());
1010 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
1011 CHECK_EQ(*element, *name);
1012
1013 // Set array length with larger than smi value.
1014 JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
1015
1016 uint32_t int_length = 0;
1017 CHECK(array->length()->ToArrayIndex(&int_length));
1018 CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
1019 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
1020
1021 // array[length] = name.
1022 JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
1023 uint32_t new_int_length = 0;
1024 CHECK(array->length()->ToArrayIndex(&new_int_length));
1025 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
1026 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
1027 CHECK_EQ(*element, *name);
1028 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
1029 CHECK_EQ(*element, *name);
1030}
1031
1032
1033TEST(JSObjectCopy) {
1034 CcTest::InitializeVM();
1035 Isolate* isolate = CcTest::i_isolate();
1036 Factory* factory = isolate->factory();
1037
1038 v8::HandleScope sc(CcTest::isolate());
1039 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
1040 Handle<Object> object = Object::GetProperty(
1041 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
1042 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
1043 Handle<JSObject> obj = factory->NewJSObject(constructor);
1044 Handle<String> first = factory->InternalizeUtf8String("first");
1045 Handle<String> second = factory->InternalizeUtf8String("second");
1046
1047 Handle<Smi> one(Smi::FromInt(1), isolate);
1048 Handle<Smi> two(Smi::FromInt(2), isolate);
1049
1050 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
1051 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
1052
1053 JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
1054 JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();
1055
1056 // Make the clone.
1057 Handle<Object> value1, value2;
1058 Handle<JSObject> clone = factory->CopyJSObject(obj);
1059 CHECK(!clone.is_identical_to(obj));
1060
1061 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1062 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1063 CHECK_EQ(*value1, *value2);
1064 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1065 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1066 CHECK_EQ(*value1, *value2);
1067
1068 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1069 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1070 CHECK_EQ(*value1, *value2);
1071 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1072 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1073 CHECK_EQ(*value1, *value2);
1074
1075 // Flip the values.
1076 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
1077 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
1078
1079 JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
1080 JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();
1081
1082 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1083 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1084 CHECK_EQ(*value1, *value2);
1085 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1086 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1087 CHECK_EQ(*value1, *value2);
1088
1089 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1090 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1091 CHECK_EQ(*value1, *value2);
1092 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1093 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1094 CHECK_EQ(*value1, *value2);
1095}
1096
1097
1098TEST(StringAllocation) {
1099 CcTest::InitializeVM();
1100 Isolate* isolate = CcTest::i_isolate();
1101 Factory* factory = isolate->factory();
1102
1103 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
1104 for (int length = 0; length < 100; length++) {
1105 v8::HandleScope scope(CcTest::isolate());
1106 char* non_one_byte = NewArray<char>(3 * length + 1);
1107 char* one_byte = NewArray<char>(length + 1);
1108 non_one_byte[3 * length] = 0;
1109 one_byte[length] = 0;
1110 for (int i = 0; i < length; i++) {
1111 one_byte[i] = 'a';
1112 non_one_byte[3 * i] = chars[0];
1113 non_one_byte[3 * i + 1] = chars[1];
1114 non_one_byte[3 * i + 2] = chars[2];
1115 }
1116 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1117 Vector<const char>(non_one_byte, 3 * length));
1118 CHECK_EQ(length, non_one_byte_sym->length());
1119 Handle<String> one_byte_sym =
1120 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1121 CHECK_EQ(length, one_byte_sym->length());
1122 Handle<String> non_one_byte_str =
1123 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1124 .ToHandleChecked();
1125 non_one_byte_str->Hash();
1126 CHECK_EQ(length, non_one_byte_str->length());
1127 Handle<String> one_byte_str =
1128 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1129 .ToHandleChecked();
1130 one_byte_str->Hash();
1131 CHECK_EQ(length, one_byte_str->length());
1132 DeleteArray(non_one_byte);
1133 DeleteArray(one_byte);
1134 }
1135}
1136
1137
1138static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1139 // Count the number of objects found in the heap.
1140 int found_count = 0;
1141 HeapIterator iterator(heap);
1142 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1143 for (int i = 0; i < size; i++) {
1144 if (*objs[i] == obj) {
1145 found_count++;
1146 }
1147 }
1148 }
1149 return found_count;
1150}
1151
1152
1153TEST(Iteration) {
1154 CcTest::InitializeVM();
1155 Isolate* isolate = CcTest::i_isolate();
1156 Factory* factory = isolate->factory();
1157 v8::HandleScope scope(CcTest::isolate());
1158
1159 // Array of objects to scan haep for.
1160 const int objs_count = 6;
1161 Handle<Object> objs[objs_count];
1162 int next_objs_index = 0;
1163
1164 // Allocate a JS array to OLD_SPACE and NEW_SPACE
1165 objs[next_objs_index++] = factory->NewJSArray(10);
1166 objs[next_objs_index++] =
1167 factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);
1168
1169 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
1170 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
1171 objs[next_objs_index++] =
1172 factory->NewStringFromStaticChars("abcdefghij", TENURED);
1173
1174 // Allocate a large string (for large object space).
1175 int large_size = Page::kMaxRegularHeapObjectSize + 1;
1176 char* str = new char[large_size];
1177 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
1178 str[large_size - 1] = '\0';
1179 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
1180 delete[] str;
1181
1182 // Add a Map object to look for.
1183 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
1184
1185 CHECK_EQ(objs_count, next_objs_index);
1186 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
1187}
1188
1189
UNINITIALIZED_TEST(TestCodeFlushing) {
  // Verifies that the code of an idle function is flushed after enough full
  // GCs, and that calling the function afterwards recompiles it lazily.
  // Uses a fresh isolate so heap state from other tests cannot interfere.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking; each full GC ages the
    // code until it crosses the flushing threshold.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo should no longer be compiled (unless the optimizer kept it alive).
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}
1248
1249
TEST(TestCodeFlushingPreAged) {
  // With --optimize-for-size, code executed only once is pre-aged and hence
  // flushed after a single additional GC; running the function again must
  // reset its age back to young so it survives GCs again.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile the script; note the trailing "foo()" executes foo exactly once,
  // which leaves its code in the pre-aged state under --optimize-for-size.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GC now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1313
1314
TEST(TestCodeFlushingIncremental) {
  // Same flushing scenario as TestCodeFlushing, but the aging GCs are driven
  // through incremental marking; also checks that optimizing a function that
  // is already enqueued as a flushing candidate leaves the queue sane.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  // A non-undefined next_function_link signals that the marker put the
  // function on the flushing-candidate list.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1382
1383
TEST(TestCodeFlushingIncrementalScavenge) {
  // Checks code flushing is robust against a scavenge that runs while
  // incremental marking has functions enqueued as flushing candidates and
  // one of the candidates has died in the meantime.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  // Overwriting the handle slot directly drops the last reference to
  // function2's JSFunction without notifying the GC.
  SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}
1451
1452
TEST(TestCodeFlushingIncrementalAbort) {
  // Checks that aborting incremental marking (here: by enabling the debugger
  // and setting a breakpoint mid-mark) disables code flushing and leaves the
  // candidate queue in a consistent state.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  EnableDebugger(CcTest::isolate());
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
  DisableDebugger(CcTest::isolate());

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1518
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  // Checks that when the CompileLazy builtin installs already-optimized code
  // into a closure while incremental marking is active, the write goes
  // through the incremental write barrier without breaking the heap.
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  // f and g share the same SharedFunctionInfo via make_closure; only f is
  // called (and later optimized).
  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate->global_object(), f_name).ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  // TODO(mvstanton): change to check that g is *not* compiled when optimized
  // cache
  // map lookup moves to the compile lazy builtin.
  CHECK(g_function->is_compiled());

  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001563
TEST(CompilationCacheCachingBehavior) {
  // Documents the script compilation cache's policy: the first compile only
  // records a hash (not findable), the second promotes it to a real entry,
  // code aging evicts the entry, and aging the cache table past its hash
  // generations prevents re-caching until the hash is recorded again.
  // If we do not flush code, or have the compilation cache turned off, this
  // test is invalid.
  if (!FLAG_flush_code || !FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode =
      construct_language_mode(FLAG_use_strict, FLAG_use_strong);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache. We can't
  // find that value.
  MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On second compilation, the hash is replaced by a real cache entry mapping
  // the source to the shared function info containing the code.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  // Check that the code cache entry survives at least one GC.
  // (Unless --optimize-for-size, in which case it might get collected
  // immediately.)
  if (!FLAG_optimize_for_size) {
    heap->CollectAllGarbage();
    info = compilation_cache->LookupScript(
        source, Handle<Object>(), 0, 0,
        v8::ScriptOriginOptions(false, true, false), native_context,
        language_mode);
    CHECK(!info.is_null());
  }

  // Progress code age until it's old and ready for GC.
  while (!info.ToHandleChecked()->code()->IsOld()) {
    // To guarantee progress, we have to MakeOlder with different parities.
    // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
    // always NO_MARKING_PARITY and the code age only progresses if the parity
    // is different.
    info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
    info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
  }

  heap->CollectAllGarbage();
  // Ensure code aging cleared the entry from the cache.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache. We can't
  // find that value.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  // Age the cache table past its hash generations so the hash recorded by
  // the previous compile is forgotten.
  for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
    compilation_cache->MarkCompactPrologue();
  }

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // If we aged the cache before caching the script, ensure that we didn't cache
  // on next compilation.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());
}
1675
1676
1677static void OptimizeEmptyFunction(const char* name) {
1678 HandleScope scope(CcTest::i_isolate());
1679 EmbeddedVector<char, 256> source;
1680 SNPrintF(source,
1681 "function %s() { return 0; }"
1682 "%s(); %s();"
1683 "%%OptimizeFunctionOnNextCall(%s);"
1684 "%s();",
1685 name, name, name, name, name);
1686 CompileRun(source.start());
1687}
1688
1689
1690// Count the number of native contexts in the weak list of native contexts.
1691int CountNativeContexts() {
1692 int count = 0;
1693 Object* object = CcTest::heap()->native_contexts_list();
1694 while (!object->IsUndefined()) {
1695 count++;
1696 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1697 }
1698 return count;
1699}
1700
1701
1702// Count the number of user functions in the weak list of optimized
1703// functions attached to a native context.
1704static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
1705 int count = 0;
1706 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1707 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1708 while (object->IsJSFunction() &&
1709 !JSFunction::cast(object)->shared()->IsBuiltin()) {
1710 count++;
1711 object = JSFunction::cast(object)->next_function_link();
1712 }
1713 return count;
1714}
1715
1716
TEST(TestInternalWeakLists) {
  // Verifies the weak list of native contexts and each context's weak list
  // of optimized functions: scavenges treat the links as strong, while full
  // mark-compact GCs drop entries whose referents have died.
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f1");
    CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f2");
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f3");
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f4");
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f5");
    CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));

    // Remove the only reference to function f1.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::heap()->CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // Overwrite the handle slot with undefined so the context becomes
    // unreachable without going through normal disposal.
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1822
1823
1824// Count the number of native contexts in the weak list of native contexts
1825// causing a GC after the specified number of elements.
1826static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1827 Heap* heap = isolate->heap();
1828 int count = 0;
1829 Handle<Object> object(heap->native_contexts_list(), isolate);
1830 while (!object->IsUndefined()) {
1831 count++;
1832 if (count == n) heap->CollectAllGarbage();
1833 object =
1834 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1835 isolate);
1836 }
1837 return count;
1838}
1839
1840
1841// Count the number of user functions in the weak list of optimized
1842// functions attached to a native context causing a GC after the
1843// specified number of elements.
1844static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
1845 int n) {
1846 int count = 0;
1847 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1848 Isolate* isolate = icontext->GetIsolate();
1849 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1850 isolate);
1851 while (object->IsJSFunction() &&
1852 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
1853 count++;
1854 if (count == n) isolate->heap()->CollectAllGarbage();
1855 object = Handle<Object>(
1856 Object::cast(JSFunction::cast(*object)->next_function_link()),
1857 isolate);
1858 }
1859 return count;
1860}
1861
1862
// Checks that the internal weak lists (native contexts, and the list of
// optimized functions per context) can be traversed safely while GCs are
// triggered in the middle of the traversal.
TEST(TestInternalWeakListsTraverseWithGC) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  // The optimized-functions weak list only exists with Crankshaft enabled.
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  ctx[0]->Enter();

  // Compile a number of functions and check the length of the weak list of
  // optimized functions both with and without GCs while iterating the list.
  // The second argument to CountOptimizedUserFunctionsWithGC is the element
  // index at which the GC is triggered.
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  OptimizeEmptyFunction("f1");
  CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f2");
  CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f3");
  CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f4");
  CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  OptimizeEmptyFunction("f5");
  CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}
1908
1909
// Measures how much code the optimizing regexp compiler emits for a regexp
// just above the "too large to optimize" threshold versus one just below it.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  // Doubling the source repeatedly keeps the last value below the threshold
  // in half_size_reg_exp and the final value above it in reg_exp_source.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile (non-optimized) code for the over-threshold regexp.
  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile (optimized) code for the half-size regexp on top of that.
  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1970
1971
// Checks that Heap::SizeOfObjects tracks allocations exactly and returns to
// the initial value after a full GC, even while concurrent sweeping is still
// in progress.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
      // SizeOfObjects must reflect every allocation immediately.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::heap()->CollectAllGarbage();

  // Normally sweeping would not be complete here, but no guarantees.

  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));

  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
}
2015
2016
// Checks Heap::GetMaximumFillToAlign and Heap::GetFillToAlign for every
// supported allocation alignment, using addresses offset from NULL.
TEST(TestAlignmentCalculations) {
  // Maximum fill amounts are consistent.
  int maximum_double_misalignment = kDoubleSize - kPointerSize;
  int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
  int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
  CHECK_EQ(0, max_word_fill);
  int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, max_double_fill);
  int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
  int max_simd128_unaligned_fill =
      Heap::GetMaximumFillToAlign(kSimd128Unaligned);
  CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);

  Address base = static_cast<Address>(NULL);
  int fill = 0;

  // Word alignment never requires fill.
  fill = Heap::GetFillToAlign(base, kWordAligned);
  CHECK_EQ(0, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
  CHECK_EQ(0, fill);

  // No fill is required when address is double aligned.
  fill = Heap::GetFillToAlign(base, kDoubleAligned);
  CHECK_EQ(0, fill);
  // Fill is required if address is not double aligned.
  fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  // kDoubleUnaligned has the opposite fill amounts.
  fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
  CHECK_EQ(0, fill);

  // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
  // kSimd128Unaligned wants address % kSimd128Size == kPointerSize.
  fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
  CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
  CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
  fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
  CHECK_EQ(kPointerSize, fill);
  fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
  CHECK_EQ(0, fill);
}
2062
2063
2064static HeapObject* NewSpaceAllocateAligned(int size,
2065 AllocationAlignment alignment) {
2066 Heap* heap = CcTest::heap();
2067 AllocationResult allocation =
2068 heap->new_space()->AllocateRawAligned(size, alignment);
2069 HeapObject* obj = NULL;
2070 allocation.To(&obj);
2071 heap->CreateFillerObjectAt(obj->address(), size);
2072 return obj;
2073}
2074
2075
2076// Get new space allocation into the desired alignment.
2077static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2078 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2079 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2080 if (fill) {
2081 NewSpaceAllocateAligned(fill + offset, kWordAligned);
2082 }
2083 return *top_addr;
2084}
2085
2086
// Checks that aligned linear (bump-pointer) allocation in new space places
// objects at the requested alignment and emits the expected filler objects.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
  const intptr_t double_misalignment = kDoubleSize - kPointerSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject* obj;
  HeapObject* filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kPointerSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned (address % kDoubleSize == kPointerSize).
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    CHECK_EQ(kPointerSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
  }

  // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
  // on platform.
  start = AlignNewSpace(kSimd128Unaligned, 0);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is no filler.
  CHECK_EQ(kPointerSize, *top_addr - start);
  start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is a filler object before the object.
  filler = HeapObject::FromAddress(start);
  CHECK(obj != filler && filler->IsFiller() &&
        filler->Size() == kSimd128Size - kPointerSize);
  CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);

  if (double_misalignment) {
    // Test the 2 other alignments possible on 32 bit platforms.
    start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == 2 * kPointerSize);
    CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
    start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
  }
}
2165
2166
2167static HeapObject* OldSpaceAllocateAligned(int size,
2168 AllocationAlignment alignment) {
2169 Heap* heap = CcTest::heap();
2170 AllocationResult allocation =
2171 heap->old_space()->AllocateRawAligned(size, alignment);
2172 HeapObject* obj = NULL;
2173 allocation.To(&obj);
2174 heap->CreateFillerObjectAt(obj->address(), size);
2175 return obj;
2176}
2177
2178
2179// Get old space allocation into the desired alignment.
2180static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2181 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2182 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2183 int allocation = fill + offset;
2184 if (allocation) {
2185 OldSpaceAllocateAligned(allocation, kWordAligned);
2186 }
2187 Address top = *top_addr;
2188 // Now force the remaining allocation onto the free list.
2189 CcTest::heap()->old_space()->EmptyAllocationInfo();
2190 return top;
2191}
2192
2193
2194// Test the case where allocation must be done from the free list, so filler
2195// may precede or follow the object.
2196TEST(TestAlignedOverAllocation) {
2197 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2198 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2199 Address start;
2200 HeapObject* obj;
2201 HeapObject* filler1;
2202 HeapObject* filler2;
2203 if (double_misalignment) {
2204 start = AlignOldSpace(kDoubleAligned, 0);
2205 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2206 // The object is aligned, and a filler object is created after.
2207 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2208 filler1 = HeapObject::FromAddress(start + kPointerSize);
2209 CHECK(obj != filler1 && filler1->IsFiller() &&
2210 filler1->Size() == kPointerSize);
2211 // Try the opposite alignment case.
2212 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2213 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2214 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2215 filler1 = HeapObject::FromAddress(start);
2216 CHECK(obj != filler1);
2217 CHECK(filler1->IsFiller());
2218 CHECK(filler1->Size() == kPointerSize);
2219 CHECK(obj != filler1 && filler1->IsFiller() &&
2220 filler1->Size() == kPointerSize);
2221
2222 // Similarly for kDoubleUnaligned.
2223 start = AlignOldSpace(kDoubleUnaligned, 0);
2224 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2225 // The object is aligned, and a filler object is created after.
2226 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2227 filler1 = HeapObject::FromAddress(start + kPointerSize);
2228 CHECK(obj != filler1 && filler1->IsFiller() &&
2229 filler1->Size() == kPointerSize);
2230 // Try the opposite alignment case.
2231 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2232 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2233 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2234 filler1 = HeapObject::FromAddress(start);
2235 CHECK(obj != filler1 && filler1->IsFiller() &&
2236 filler1->Size() == kPointerSize);
2237 }
2238
2239 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2240 // on platform.
2241 start = AlignOldSpace(kSimd128Unaligned, 0);
2242 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2243 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2244 // There is a filler object after the object.
2245 filler1 = HeapObject::FromAddress(start + kPointerSize);
2246 CHECK(obj != filler1 && filler1->IsFiller() &&
2247 filler1->Size() == kSimd128Size - kPointerSize);
2248 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2249 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2250 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2251 // There is a filler object before the object.
2252 filler1 = HeapObject::FromAddress(start);
2253 CHECK(obj != filler1 && filler1->IsFiller() &&
2254 filler1->Size() == kSimd128Size - kPointerSize);
2255
2256 if (double_misalignment) {
2257 // Test the 2 other alignments possible on 32 bit platforms.
2258 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2259 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2260 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2261 // There are filler objects before and after the object.
2262 filler1 = HeapObject::FromAddress(start);
2263 CHECK(obj != filler1 && filler1->IsFiller() &&
2264 filler1->Size() == 2 * kPointerSize);
2265 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2266 CHECK(obj != filler2 && filler2->IsFiller() &&
2267 filler2->Size() == kPointerSize);
2268 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2269 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2270 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2271 // There are filler objects before and after the object.
2272 filler1 = HeapObject::FromAddress(start);
2273 CHECK(obj != filler1 && filler1->IsFiller() &&
2274 filler1->Size() == kPointerSize);
2275 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2276 CHECK(obj != filler2 && filler2->IsFiller() &&
2277 filler2->Size() == 2 * kPointerSize);
2278 }
2279}
2280
2281
// Compares Heap::SizeOfObjects against the sum of object sizes seen by a
// HeapIterator; the two book-keeping mechanisms must agree within 5%.
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
  CcTest::InitializeVM();
  HeapIterator iterator(CcTest::heap());
  intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
  intptr_t size_of_objects_2 = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    // Free-space fillers are not "live" objects and are excluded.
    if (!obj->IsFreeSpace()) {
      size_of_objects_2 += obj->Size();
    }
  }
  // Delta must be within 5% of the larger result.
  // TODO(gc): Tighten this up by distinguishing between byte
  // arrays that are real and those that merely mark free space
  // on the heap.
  if (size_of_objects_1 > size_of_objects_2) {
    intptr_t delta = size_of_objects_1 - size_of_objects_2;
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
           "Iterator: %" V8_PTR_PREFIX "d, "
           "delta: %" V8_PTR_PREFIX "d\n",
           size_of_objects_1, size_of_objects_2, delta);
    CHECK_GT(size_of_objects_1 / 20, delta);
  } else {
    intptr_t delta = size_of_objects_2 - size_of_objects_1;
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
           "Iterator: %" V8_PTR_PREFIX "d, "
           "delta: %" V8_PTR_PREFIX "d\n",
           size_of_objects_1, size_of_objects_2, delta);
    CHECK_GT(size_of_objects_2 / 20, delta);
  }
}
2314
2315
2316static void FillUpNewSpace(NewSpace* new_space) {
2317 // Fill up new space to the point that it is completely full. Make sure
2318 // that the scavenger does not undo the filling.
2319 Heap* heap = new_space->heap();
2320 Isolate* isolate = heap->isolate();
2321 Factory* factory = isolate->factory();
2322 HandleScope scope(isolate);
2323 AlwaysAllocateScope always_allocate(isolate);
2324 intptr_t available = new_space->Capacity() - new_space->Size();
2325 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2326 for (intptr_t i = 0; i < number_of_fillers; i++) {
2327 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2328 }
2329}
2330
2331
// Checks that the new space grows by doubling its capacity, refuses to
// shrink while (mostly) full, and halves its capacity when shrunk empty.
TEST(GrowAndShrinkNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  // Explicitly growing should double the space capacity.
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);

  // Filling the space should not change its capacity.
  old_capacity = new_space->TotalCapacity();
  FillUpNewSpace(new_space);
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Explicitly shrinking should not affect space capacity while it is full.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Let the scavenger empty the new space.
  heap->CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}
2381
2382
// Checks that CollectAllAvailableGarbage shrinks a grown new space back to
// its original capacity.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  intptr_t old_capacity, new_capacity;
  // Grow the space and fill it so there is something for the GC to reclaim.
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);
  FillUpNewSpace(new_space);
  heap->CollectAllAvailableGarbage();
  // The "last resort" GC is expected to shrink the space back.
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}
2406
2407
2408static int NumberOfGlobalObjects() {
2409 int count = 0;
2410 HeapIterator iterator(CcTest::heap());
2411 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2412 if (obj->IsJSGlobalObject()) count++;
2413 }
2414 return count;
2415}
2416
2417
// Test that we don't embed maps from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaMap) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create an object in ctx1 whose map could be embedded by code
    // optimized in ctx2.
    CompileRun("var v = {x: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o.x; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only strong reference from ctx2 to the ctx1 object.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible: optimized code in ctx2 must not have
  // embedded (and thereby retained) a map from ctx1.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2465
2466
// Test that we don't embed functions from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaFunction) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create a function in ctx1 that is passed as an argument to code
    // optimized in ctx2.
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only strong reference from ctx2 to the ctx1 function.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible: optimized code in ctx2 must not have
  // embedded (and thereby retained) the function from ctx1.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2514
2515
// Like LeakNativeContextViaMap, but via a keyed (elements) load: optimized
// code in ctx2 must not embed and retain the elements map from ctx1.
TEST(LeakNativeContextViaMapKeyed) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create an array in ctx1 that is keyed-loaded by code optimized in ctx2.
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only strong reference from ctx2 to the ctx1 array.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2561
2562
// Like LeakNativeContextViaMap, but via the prototype chain: optimized code
// in ctx2 must not embed and retain a map whose prototype lives in ctx1.
TEST(LeakNativeContextViaMapProto) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create an object in ctx1 that is installed as a prototype by code
    // optimized in ctx2.
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only strong reference from ctx2 to the ctx1 object.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2612
2613
// Regression test: running code that hits the instanceof stub while the
// optimized caller's code object is already black during incremental marking
// must not miss a write barrier (verified by the heap verifier on GC).
TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  if (i::FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    // f is optimized with an instanceof; g later calls it with an object
    // whose constructor is a fresh function.
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  // Step the marker until f's code object has been marked black.
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }

  CHECK(marking->IsMarking());

  {
    // Run g() while f's code is black; any stores into it must be barriered.
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  // Finish the GC; with VERIFY_HEAP this validates the marking invariants.
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::heap()->CollectGarbage(OLD_SPACE);
}
2668
2669
// Checks that a global IC age bump (via ContextDisposedNotification) during
// incremental marking resets the SharedFunctionInfo's optimization counters
// and profiler ticks.
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++)  s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
  CHECK(f->IsOptimized());

  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();
  // The following calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();
  // After the GC, the function's ic age must have caught up and its
  // optimization bookkeeping must be reset.
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2710
2711
// Same as the previous test, but with a full mark-sweep instead of
// incremental marking: a global IC age bump must reset the
// SharedFunctionInfo's optimization counters and profiler ticks.
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++)  s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  CcTest::heap()->incremental_marking()->Stop();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  CcTest::heap()->CollectAllGarbage();

  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2752
2753
// Checks the lifetime of Heap::current_gc_flags_: flags are reset after a
// full GC, persist across scavenges while incremental marking runs, and are
// cleared again once marking finishes.
HEAP_TEST(GCFlags) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Set the flags to check whether they are appropriately reset after the GC.
  heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
  // Flags passed to StartIncrementalMarking stick while marking is active.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2783
2784
// Checks that an idle notification with a long deadline finalizes an
// in-progress incremental marking cycle (triggering exactly one GC).
TEST(IdleNotificationFinishMarking) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  CHECK_EQ(CcTest::heap()->gc_count(), 0);

  // TODO(hpayer): We cannot write proper unit test right now for heap.
  // The ideal test would call kMaxIdleMarkingDelayCounter to test the
  // marking delay counter.

  // Perform a huge incremental marking step but don't complete marking.
  intptr_t bytes_processed = 0;
  do {
    bytes_processed =
        marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                      IncrementalMarking::FORCE_MARKING,
                      IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
  } while (bytes_processed);

  // The next invocations of incremental marking are not going to complete
  // marking since the completion threshold is not reached.
  for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
       i++) {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  IncrementalMarking::FORCE_MARKING,
                  IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
  }

  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  CHECK_EQ(CcTest::heap()->gc_count(), 1);
}
2830
2831
2832// Test that HAllocateObject will always return an object in new-space.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Fill up new space so the optimized allocation has no fast-path room;
  // AlwaysAllocateScope forces allocation to succeed anyway.
  SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");

  CHECK_EQ(4, res.As<v8::Object>()
                  ->GetRealNamedProperty(ctx, v8_str("x"))
                  .ToLocalChecked()
                  ->Int32Value(ctx)
                  .FromJust());

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));

  // The object allocated by the optimized code must live in new space.
  CHECK(CcTest::heap()->InNewSpace(*o));
}
2865
2866
// Checks that folded allocations in optimized code respect pretenuring
// decisions: the outer array, both inner arrays, and their backing stores
// must all end up in old space.
TEST(OptimizedPretenuringAllocationFolding) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array();"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}], [1.1]];"
      "  }"
      "  return elements[number_elements-1]"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2916
2917
// Checks that object-array literals allocated by optimized code are
// pretenured: the array and its elements backing store must be in old space.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [{}, {}, {}];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2956
2957
// Checks pretenuring of objects with mixed in-object properties (a nested
// object and a double): the outer object, its object-valued property, and
// any boxed doubles must be in old space.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  // idx1/idx2 are the first two in-object property slots ('a' and 'b').
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
  // 'b' may be stored unboxed (double directly in the object) or boxed as a
  // HeapNumber, depending on the unboxed-doubles configuration.
  if (!o->IsUnboxedDoubleField(idx2)) {
    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
  } else {
    CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
  }

  // NOTE(review): idx1/idx2 were computed from the outer object's map but
  // are reused for the inner object ({c: 2.2, d: {}}) below — this assumes
  // both maps lay out their first two in-object fields identically; confirm.
  JSObject* inner_object =
      reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
  CHECK(CcTest::heap()->InOldSpace(inner_object));
  if (!inner_object->IsUnboxedDoubleField(idx1)) {
    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
  } else {
    CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
  }
  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}
3014
3015
// Checks pretenuring of objects with double-valued properties: the object
// and its properties backing store must be in old space.
TEST(OptimizedPretenuringDoubleArrayProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: 1.1, b: 2.2};"
      "  }"
      // After the loop, var-hoisted 'i' equals number_elements, so this is
      // the last element written.
      "  return elements[i - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(o->properties()));
}
3054
3055
// Checks pretenuring of double-array literals: the array and its elements
// backing store must be in old space.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [1.1, 2.2, 3.3];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
3094
3095
// Checks pretenuring of nested mixed array literals (an object array and a
// double array inside an outer array): all three arrays and their backing
// stores must be in old space.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = 100;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
3144
3145
// Checks pretenuring of nested object-array literals: the outer array and
// both inner object arrays (with their backing stores) must be in old space.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
  v8::Local<v8::Value> int_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
}
3195
3196
// Checks pretenuring of nested double-array literals: the outer array and
// both inner double arrays (with their backing stores) must be in old space.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
}
3246
3247
3248// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // The store of 3.14 transitions the array to double elements; the
  // literal has no pretenuring feedback, so it must stay in new space.
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = new Array(1, 2, 3);"
      "  numbers[0] = 3.14;"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
                                       ->Get(ctx, v8_str("0"))
                                       .ToLocalChecked()
                                       ->Int32Value(ctx)
                                       .FromJust());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InNewSpace(o->elements()));
}
3276
3277
3278static int CountMapTransitions(Map* map) {
3279 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3280}
3281
3282
3283// Test that map transitions are cleared and maps are collected with
3284// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  // Do not artificially keep dead maps alive; this test requires map
  // transitions to be collectable immediately.
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  static const int transitions_count = 256;

  CompileRun("function F() {}");
  {
    // Create one map transition per distinct property name added to F
    // instances.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  i::Handle<JSReceiver> root =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
3324
3325
3326#ifdef DEBUG
3327static void AddTransitions(int transitions_count) {
3328 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3329 for (int i = 0; i < transitions_count; i++) {
3330 EmbeddedVector<char, 64> buffer;
3331 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3332 CompileRun(buffer.start());
3333 }
3334}
3335
3336
3337static i::Handle<JSObject> GetByName(const char* name) {
3338 return i::Handle<JSObject>::cast(
3339 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3340 CcTest::global()
3341 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
3342 .ToLocalChecked())));
3343}
3344
3345
// Adds |property_name| (with value 23) to |object| while configuring the
// heap so that a global GC triggers after |gc_count| allocations — the
// property store is thus interrupted by GC, exercising transition-array
// shrinking.  Note: mutates global GC flags as a side effect.
static void AddPropertyTo(
    int gc_count, Handle<JSObject> object, const char* property_name) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  // Flags must be set before the SetProperty call below so the GC fires
  // during the property addition.
  i::FLAG_gc_interval = gc_count;
  i::FLAG_gc_global = true;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::heap()->set_allocation_timeout(gc_count);
  JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
}
3358
3359
// Checks that a transition array shrinks correctly when a GC during
// property addition collects all previously created transitions.
TEST(TransitionArrayShrinksDuringAllocToZero) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() { }");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Get rid of o
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3388
3389
// Checks that a transition array shrinks correctly when a GC during
// property addition leaves one prior transition alive.
TEST(TransitionArrayShrinksDuringAllocToOne) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Count number of live transitions after marking.  Two transitions are
  // left: 'o' still holds an instance of one transition target, and the
  // 'funny' transition was just added to root's map.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(2, transitions_after);
}
3415
3416
// Checks transition array shrinking when the property being added ('prop9')
// already has an existing transition in the array.
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(0, root, "prop9");
  CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3442
3443
// Checks the upgrade of a simple (single-entry) transition to a full
// transition array when a second property is added with GC pressure.
TEST(TransitionArraySimpleToFull) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 1;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  // With exactly one transition, the map stores it in simple (inline) form.
  CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
  AddPropertyTo(2, root, "happy");

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3471#endif // DEBUG
3472
3473
// Regression test for a map-transition performed by a StoreIC while
// incremental marking is active: the transitioned map must be re-marked so
// the root object survives in a consistent state.
TEST(Regress2143a) {
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking(CcTest::heap());

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
3511
3512
// Same scenario as Regress2143a, but the transitioning store is performed by
// optimized code (LStoreNamedField) instead of a StoreIC.
TEST(Regress2143b) {
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking(CcTest::heap());

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(new Object);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
3554
3555
// Fills old space with many pages that each hold only a single live fixed
// array, then checks that successive GCs release the over-reserved pages back
// to the OS, ending with a single page after a last-resort collection.
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  // Parallel compaction increases fragmentation, depending on how existing
  // memory is distributed. Since this is non-deterministic because of
  // concurrent sweeping, we disable it for this test.
  i::FLAG_parallel_compaction = false;
  // Concurrent sweeping adds non determinism, depending on when memory is
  // available for further reuse.
  i::FLAG_concurrent_sweeping = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_space = heap->old_space();
  CHECK_EQ(1, old_space->CountTotalPages());
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    // Filling the current page forces the next tenured allocation onto a
    // fresh page, so each iteration adds exactly one mostly-empty page.
    SimulateFullSpace(old_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered for preparation");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 1");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 2");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  heap->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(1, old_space->CountTotalPages());
}
3611
3612static int forced_gc_counter = 0;
3613
3614void MockUseCounterCallback(v8::Isolate* isolate,
3615 v8::Isolate::UseCounterFeature feature) {
3616 isolate->GetCurrentContext();
3617 if (feature == v8::Isolate::kForcedGC) {
3618 forced_gc_counter++;
3619 }
3620}
3621
3622
3623TEST(CountForcedGC) {
3624 i::FLAG_expose_gc = true;
3625 CcTest::InitializeVM();
3626 Isolate* isolate = CcTest::i_isolate();
3627 v8::HandleScope scope(CcTest::isolate());
3628
3629 isolate->SetUseCounterCallback(MockUseCounterCallback);
3630
3631 forced_gc_counter = 0;
3632 const char* source = "gc();";
3633 CompileRun(source);
3634 CHECK_GT(forced_gc_counter, 0);
3635}
3636
3637
3638#ifdef OBJECT_PRINT
// Smoke test: printing a SharedFunctionInfo to stdout must not crash.
// Only compiled when OBJECT_PRINT is defined (see surrounding #ifdef).
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << std::endl;
}
3654#endif // OBJECT_PRINT
3655
3656
// Checks that the weak cells recorded in a function's feedback vector for two
// monomorphic call sites survive an incremental-marking GC cycle (i.e. the
// feedback is preserved, not cleared).
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  {
    CompileRun("function fun() {};");
    fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  {
    // Shadow the first 'fun' so fun1 and fun2 are two distinct closures.
    CompileRun("function fun() {};");
    fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains type feedback for the two closures.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
  FeedbackVectorHelper feedback_helper(feedback_vector);

  // One feedback slot per call site in f.
  int expected_slots = 2;
  CHECK_EQ(expected_slots, feedback_helper.slot_count());
  int slot1 = 0;
  int slot2 = 1;
  CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
  CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The weak cells must still point at the (live) closures after GC.
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
             ->cleared());
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
             ->cleared());
}
3700
3701
3702static Code* FindFirstIC(Code* code, Code::Kind kind) {
3703 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3704 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3705 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3706 RelocInfo* info = it.rinfo();
3707 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3708 if (target->is_inline_cache_stub() && target->kind() == kind) {
3709 return target;
3710 }
3711 }
3712 return NULL;
3713}
3714
3715
3716static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3717 InlineCacheState desired_state) {
3718 Handle<TypeFeedbackVector> vector =
3719 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3720 FeedbackVectorHelper helper(vector);
3721 FeedbackVectorSlot slot = helper.slot(slot_index);
3722 if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
3723 LoadICNexus nexus(vector, slot);
3724 CHECK(nexus.StateFromFeedback() == desired_state);
3725 } else {
3726 CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
3727 KeyedLoadICNexus nexus(vector, slot);
3728 CHECK(nexus.StateFromFeedback() == desired_state);
3729 }
3730}
3731
3732
// Asserts that the load IC feedback at |slot_index| in |f|'s feedback vector
// has been cleared (e.g. after a context-dispose + GC cycle).
static void CheckVectorICCleared(Handle<JSFunction> f, int slot_index) {
  Handle<TypeFeedbackVector> vector =
      Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
  FeedbackVectorSlot slot(slot_index);
  LoadICNexus nexus(vector, slot);
  CHECK(IC::IsCleared(&nexus));
}
3740
3741
// Checks that monomorphic construct-site feedback (a weak cell in the
// feedback vector) survives incremental marking + GC when the feedback
// target belongs to the same native context.
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The construct feedback must still be a weak cell (not cleared).
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}
3764
3765
// Checks that monomorphic construct-site feedback referencing an object from
// a *different* (disposed) native context is cleared back to the
// uninitialized sentinel by incremental marking + GC.
TEST(IncrementalMarkingClearsMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Create the constructor inside a separate context that goes away at the
    // end of this scope.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; };");
    fun1 = env->Global()->Get(env.local(), v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains a monomorphic constructor for object
  // originating from a different native context.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun1); f(fun1);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));


  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The feedback slot must have been reset to the uninitialized sentinel.
  CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
           vector->Get(FeedbackVectorSlot(0)));
}
3802
3803
// Checks that a monomorphic load IC whose target map belongs to the same
// native context stays MONOMORPHIC across incremental marking + GC.
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // Feedback state must be unchanged after the GC cycle.
  CheckVectorIC(f, 0, MONOMORPHIC);
}
3824
3825
// Checks that a monomorphic load IC whose target map belongs to a disposed
// native context is cleared by incremental marking + GC.
TEST(IncrementalMarkingClearsMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Allocate the receiver in a separate context that is dropped when this
    // scope ends.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
}
3856
3857
// Checks that a polymorphic load IC survives incremental marking + GC while
// both feeding contexts are still considered live (no dispose notification).
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // NOTE(review): unlike the Clears variant below, no context dispose
  // notification is fired here — the IC is expected to survive the GC.
  // (The original "Fire context dispose notification." comment was stale.)
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3894
3895
// Checks that a polymorphic load IC fed by objects from two disposed native
// contexts is cleared by incremental marking + GC.
TEST(IncrementalMarkingClearsPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
}
3933
3934
// External one-byte string resource backed by a heap-allocated char buffer.
// Dispose() frees the buffer and nulls the pointer, so IsDisposed() lets
// tests observe when V8 has released the external string.
class SourceResource : public v8::String::ExternalOneByteStringResource {
 public:
  // Takes ownership of |data| (freed with i::DeleteArray in Dispose()).
  explicit SourceResource(const char* data)
    : data_(data), length_(strlen(data)) { }

  // Called by V8 when the external string is garbage collected.
  virtual void Dispose() {
    i::DeleteArray(data_);
    data_ = NULL;
  }

  const char* data() const { return data_; }

  size_t length() const { return length_; }

  // True once Dispose() has run, i.e. the external string was GC'ed.
  bool IsDisposed() { return data_ == NULL; }

 private:
  const char* data_;    // owned buffer; NULL after Dispose()
  size_t length_;       // length captured at construction time
};
3955
3956
// Runs |source| (which stores an error into 'error') from an external string,
// then runs |accessor| (a use of error.stack) and checks that the external
// source string becomes collectible afterwards — i.e. the stack-trace data no
// longer retains it.
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    // Inner scope so the handles to the script/string die before the final
    // GCs below.
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage();
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  // Touching the stack accessor should drop the retained source data...
  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
3988
3989
// Exercises ReleaseStackTraceDataTest with several error shapes (plain error,
// stack overflow, error used as a prototype) and both the stack getter and
// setter. Runs in a fresh isolate so flags can be set before VM setup.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (i::FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null;            "
    /* Normal Error */           "try {                        "
                                 "  throw new Error();         "
                                 "} catch (e) {                "
                                 "  error = e;                 "
                                 "}                            ";
    static const char* source2 = "var error = null;            "
    /* Stack overflow */         "try {                        "
                                 "  (function f() { f(); })(); "
                                 "} catch (e) {                "
                                 "  error = e;                 "
                                 "}                            ";
    static const char* source3 = "var error = null;            "
    /* Normal Error */           "try {                        "
    /* as prototype */           "  throw new Error();         "
                                 "} catch (e) {                "
                                 "  error = {};                "
                                 "  error.__proto__ = e;       "
                                 "}                            ";
    static const char* source4 = "var error = null;            "
    /* Stack overflow */         "try {                        "
    /* as prototype */           "  (function f() { f(); })(); "
                                 "} catch (e) {                "
                                 "  error = {};                "
                                 "  error.__proto__ = e;       "
                                 "}                            ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
4047
4048
// Regression test: flushing code of closures while a handle keeps the
// optimized code object alive must not confuse the deoptimizer when a
// flushed closure is called again.
TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    CHECK(g->is_compiled());
    // Age g's code past the flushing threshold so the GC treats it as a
    // code flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Keep f's optimized code alive via a handle that escapes this scope.
    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage();

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}
4107
4108
// Regression test: unoptimized code may be flushed even while optimized code
// for the same SharedFunctionInfo is still cached in the optimized code map;
// installing from the map afterwards must not break the deoptimizer.
TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare an optimized closure that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age the unoptimized code past the flushing threshold.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking(heap);
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}
4153
4154
// Regression test: replacing a flushing candidate's unoptimized code via
// optimization mid-GC must correctly unlink it from the code flushing
// candidate list (checked via the gc_metadata link field).
TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age the code so the next GC considers it a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(env.local(), v8_str("flushMe"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking(heap);
  // Non-NULL gc_metadata means f's code is enqueued on the candidate list.
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage();
  // After the cycle the code must be unlinked from the candidate list.
  CHECK(shared1->code()->gc_metadata() == NULL);
}
4225
4226
// Regression test: carefully lays out new space so that the array literal
// allocated by 'fastliteralcase(mote, 2.5)' lands where an AllocationMemento
// would be expected right behind a filler; running the store must not read
// through the filler as if it were a valid memento.
TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "  literal[0] = value;"
             "  return literal;"
             "}"
             "function get_standard_literal() {"
             "  var literal = [1, 2, 3];"
             "  return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  // Pre-bind 'mote' to a smi so the later Set does not allocate.
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space until exactly JSArray + memento + one pointer remain.
  AllocateAllButNBytes(CcTest::heap()->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need a filler the size of an AllocationMemento object, plus an extra
  // pointer-sized fill value.
  HeapObject* obj = NULL;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kPointerSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(
      addr_obj, AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
4303
4304
4305#ifdef DEBUG
// Regression test (DEBUG only): growing the optimized code map under an
// artificial allocation timeout forces a GC during the map expansion; adding
// entries repeatedly must survive that.
TEST(Regress513507) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_gc_global = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Prepare function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    // Drop the JS-level reference; only the SharedFunctionInfo handle stays.
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Optimization can be disabled by flags/platform; nothing to test then.
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the new code several times to the optimized code map and also set an
  // allocation timeout so that expanding the code map will trigger a GC.
  heap->set_allocation_timeout(5);
  FLAG_gc_interval = 1000;
  for (int i = 0; i < 10; ++i) {
    // Distinct BailoutIds make each entry unique, forcing the map to grow.
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
}
4358#endif // DEBUG
4359
4360
// Regression test: evacuating a literals array referenced from an optimized
// code map (enqueued by incremental marking) must not leave stale pointers
// behind when the map is subsequently grown and a full GC runs.
TEST(Regress514122) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    // Keep only the SharedFunctionInfo alive; drop the JS-level reference.
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Bail out if the code was not actually optimized.
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the code several times to the optimized code map.
  for (int i = 0; i < 3; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  shared->optimized_code_map()->Print();

  // Add the code with a literals array to be evacuated.
  Page* evac_page;
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure literal is placed on an old-space evacuation candidate.
    SimulateFullSpace(heap->old_space());

    // Make sure the number of literals is > 0.
    Handle<LiteralsArray> lit =
        LiteralsArray::New(isolate, vector, 23, TENURED);

    evac_page = Page::FromAddress(lit->address());
    BailoutId id = BailoutId(100);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Heap is ready, force {evac_page} to become an evacuation candidate and
  // simulate incremental marking to enqueue optimized code map.
  FLAG_manual_evacuation_candidates_selection = true;
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  SimulateIncrementalMarking(heap);

  // No matter whether reachable or not, {boomer} is doomed.
  Handle<Object> boomer(shared->optimized_code_map(), isolate);

  // Add the code several times to the optimized code map. This will leave old
  // copies of the optimized code map unreachable but still marked.
  for (int i = 3; i < 6; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Trigger a GC to flush out the bug.
  heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
  boomer->Print();
}
4452
4453
// Checks that entries in the optimized code map whose context has died are
// re-used for new contexts instead of growing the map.
TEST(OptimizedCodeMapReuseEntries) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
  // run this test.
  if (i::FLAG_turbo) return;
  CcTest::InitializeVM();
  v8::Isolate* v8_isolate = CcTest::isolate();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create 3 contexts, allow the 2nd one to be disposed, and verify that
  // a 4th context will re-use the weak slots in the optimized code map
  // to hold data, rather than expanding the map.
  v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
  const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
  v8::ScriptCompiler::Source script_source(
      v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
          .ToLocalChecked());
  // Compile context-independently so the same script can be bound to each
  // context in turn.
  v8::Local<v8::UnboundScript> indep =
      v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
          .ToLocalChecked();
  const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  c1->Enter();
  indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
  CompileRun(toplevel);

  Handle<SharedFunctionInfo> shared;
  Handle<JSFunction> foo = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
  CHECK(foo->shared()->is_compiled());
  shared = handle(foo->shared());
  c1->Exit();

  {
    HandleScope scope(isolate);
    v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
    c2->Enter();
    indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
    CompileRun(toplevel);
    c2->Exit();
  }

  {
    HandleScope scope(isolate);
    v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
    c3->Enter();
    indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
    CompileRun(toplevel);
    c3->Exit();

    // Now, collect garbage. Context c2 should have no roots to it, and its
    // entry in the optimized code map should be free for a new context.
    for (int i = 0; i < 4; i++) {
      heap->CollectAllGarbage();
    }

    Handle<FixedArray> optimized_code_map =
        handle(shared->optimized_code_map());
    // There should be 3 entries in the map.
    CHECK_EQ(
        3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
            SharedFunctionInfo::kEntryLength));
    // But one of them (formerly for c2) should be cleared.
    int cleared_count = 0;
    for (int i = SharedFunctionInfo::kEntriesStart;
         i < optimized_code_map->length();
         i += SharedFunctionInfo::kEntryLength) {
      cleared_count +=
          WeakCell::cast(
              optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
                  ->cleared()
              ? 1
              : 0;
    }
    CHECK_EQ(1, cleared_count);

    // Verify that a new context uses the cleared entry rather than creating
    // a new optimized code map array.
    v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
    c4->Enter();
    indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
    CompileRun(toplevel);
    c4->Exit();
    // Same backing array means the map was not expanded.
    CHECK_EQ(*optimized_code_map, shared->optimized_code_map());

    // Now each entry is in use.
    cleared_count = 0;
    for (int i = SharedFunctionInfo::kEntriesStart;
         i < optimized_code_map->length();
         i += SharedFunctionInfo::kEntryLength) {
      cleared_count +=
          WeakCell::cast(
              optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
                  ->cleared()
              ? 1
              : 0;
    }
    CHECK_EQ(0, cleared_count);
  }
}
4561
4562
// Regression test: flushing the unoptimized code of an inlined function while
// the outer optimized code survives via the optimized code map must not crash
// when the optimized code is later re-installed on a new closure.
TEST(Regress513496) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare an optimized closure containing an inlined function. Then age
  // the inlined unoptimized code to trigger code flushing but make sure the
  // outer optimized code is kept in the optimized code map.
  Handle<SharedFunctionInfo> shared;
  {
    LocalContext context;
    HandleScope inner_scope(isolate);
    CompileRun(
        "function g(x) { return x + 1 }"
        "function mkClosure() {"
        "  return function(x) { return g(x); };"
        "}"
        "var f = mkClosure();"
        "f(1); f(2);"
        "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> g = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("g"))
                                           .ToLocalChecked())));
    CHECK(g->shared()->is_compiled());
    // Age g's code past the flushing threshold so the next full GC drops it.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("f"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    // Keep only the SharedFunctionInfo alive; drop the JS-level reference.
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Lookup the optimized code and keep it alive.
  CodeAndLiterals result = shared->SearchOptimizedCodeMap(
      isolate->context()->native_context(), BailoutId::None());
  Handle<Code> optimized_code(result.code, isolate);

  // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
  // though the optimized code for 'f' is reachable via the optimized code map.
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var h = mkClosure(); h('bozo');");
}
4622
4623
// Checks that slots in a large object pointing to an evacuation candidate are
// recorded during incremental marking and updated when the candidate moves.
TEST(LargeObjectSlotRecording) {
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
  Page* evac_page = Page::FromAddress(lit->address());
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  // Remember the raw pointer so we can verify the object moved.
  FixedArray* old_location = *lit;

  // Allocate a large object.
  int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
  CHECK(size > Page::kMaxRegularHeapObjectSize);
  Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to activate the write barrier.
  SimulateIncrementalMarking(heap, false);
  heap->incremental_marking()->AdvanceIncrementalMarking(
      10000000, 10000000, IncrementalMarking::IdleStepActions());

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  // Move the evacuation candidate object.
  CcTest::heap()->CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
4666
4667
// Visitor that ignores every pointer it is shown; used below to exercise
// handle iteration without touching the visited objects.
class DummyVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {}
};
4672
4673
// Checks that iterating handles while a DeferredHandleScope is open works
// even when the current handle block is exactly full.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Fill the current handle block to capacity.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
4693
4694
// Checks that a single big incremental-marking step makes enough progress to
// (nearly) complete marking even when a very large array is on the heap.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking();
  }
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
4712
4713
// Checks that optimized code (which may inline allocations) still runs
// correctly after inline allocation is disabled and re-enabled.
TEST(DisableInlineAllocation) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
4741
4742
4743static int AllocationSitesCount(Heap* heap) {
4744 int count = 0;
4745 for (Object* site = heap->allocation_sites_list();
4746 !(site->IsUndefined());
4747 site = AllocationSite::cast(site)->weak_next()) {
4748 count++;
4749 }
4750 return count;
4751}
4752
4753
// Checks that code registered in an allocation site's dependent_code() is
// dropped (its weak cell cleared) by GC once the function dies, even while
// the allocation site itself is kept alive by a global handle.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_crankshaft()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Pin the site with a global handle so it survives the GCs below.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    CHECK_EQ(DependentCode::kAllocationSiteTransitionChangedGroup,
             site->dependent_code()->group());
    CHECK_EQ(1, site->dependent_code()->count());
    CHECK(site->dependent_code()->object_at(0)->IsWeakCell());
    Code* function_bar = Code::cast(
        WeakCell::cast(site->dependent_code()->object_at(0))->value());
    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));
    CHECK_EQ(bar_handle->code(), function_bar);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
        WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
}
4810
4811
// Checks that cells embedded in optimized code are treated weakly: once the
// function dies, GC marks the surviving code for deoptimization.
TEST(CellsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "bar = (function() {"
        "  function bar() {"
        "    return foo(1);"
        "  };"
        "  var foo = function(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  %OptimizeFunctionOnNextCall(bar);"
        "  bar(foo);"
        "  return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive; the function becomes unreachable.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
4855
4856
// Checks that objects embedded in optimized code are treated weakly: once the
// function dies, GC marks the surviving code for deoptimization.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        "  return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive; the function becomes unreachable.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
4898
4899
// Checks that the weak object-to-code table does not accumulate stale entries
// when optimized functions are created and collected across incremental
// marking cycles.
TEST(NoWeakHashTableLeakWithIncrementalMarking) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  if (!i::FLAG_incremental_marking) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_compilation_cache = false;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  // Do not run for no-snap builds.
  if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;

  v8::internal::Heap* heap = CcTest::heap();

  // Get a clean slate regarding optimized functions on the heap.
  i::Deoptimizer::DeoptimizeAll(isolate);
  heap->CollectAllGarbage();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  for (int i = 0; i < 3; i++) {
    SimulateIncrementalMarking(heap);
    {
      LocalContext context;
      HandleScope scope(heap->isolate());
      // Use uniquely named functions per iteration (compilation cache is off)
      // so each round creates fresh optimized code.
      EmbeddedVector<char, 256> source;
      SNPrintF(source,
               "function bar%d() {"
               "  return foo%d(1);"
               "};"
               "function foo%d(x) { with (x) { return 1 + x; } };"
               "bar%d();"
               "bar%d();"
               "bar%d();"
               "%%OptimizeFunctionOnNextCall(bar%d);"
               "bar%d();",
               i, i, i, i, i, i, i, i);
      CompileRun(source.start());
    }
    heap->CollectAllGarbage();
  }
  int elements = 0;
  if (heap->weak_object_to_code_table()->IsHashTable()) {
    WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
    elements = t->NumberOfElements();
  }
  // After collecting everything, the table must be empty.
  CHECK_EQ(0, elements);
}
4949
4950
// Compiles, warms up, and force-optimizes a trivial function called |name|,
// then returns a handle to the resulting JSFunction looked up on the global
// object.
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
                                                const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
          "function %s() { return 0; }"
          "%s(); %s();"
          "%%OptimizeFunctionOnNextCall(%s);"
          "%s();", name, name, name, name, name);
  CompileRun(source.start());
  i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(isolate->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
  return fun;
}
4967
4968
4969static int GetCodeChainLength(Code* code) {
4970 int result = 0;
4971 while (code->next_code_link()->IsCode()) {
4972 result++;
4973 code = Code::cast(code->next_code_link());
4974 }
4975 return result;
4976}
4977
4978
// Checks that the next_code_link field of optimized code is weak: when a
// function on the chain dies, the chain shortens by one after GC.
TEST(NextCodeLinkIsWeak) {
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  heap->CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    // The most recently optimized code is linked ahead of the older one.
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
    CompileRun("mortal = null; immortal = null;");
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
5008
5009
// Assembles a minimal (push/drop) code object flagged as OPTIMIZED_FUNCTION,
// suitable for threading onto a context's optimized code list in tests.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(1);
  masm.GetCode(&desc);
  Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
  CHECK(code->IsCode());
  return code;
}
5024
5025
// Checks weakness of next_code_link using hand-built code objects threaded
// directly onto the native context's OPTIMIZED_CODE_LIST.
TEST(NextCodeLinkIsWeak2) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  heap->CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    // Chain: immortal -> mortal -> old_head; only immortal escapes the scope.
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
5051
5052
// Set by ClearWeakIC when a weak callback fires; the weak-IC tests below
// reset it before forcing a GC and then check it.
static bool weak_ic_cleared = false;
5054
5055static void ClearWeakIC(
5056 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5057 printf("clear weak is called\n");
5058 weak_ic_cleared = true;
5059 data.GetParameter()->Reset();
5060}
5061
5062
// Checks that a constructor function held only by a call IC's feedback is
// referenced weakly, and that the IC can go monomorphic again afterwards.
TEST(WeakFunctionInConstructor) {
  if (i::FLAG_always_opt) return;
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      "  return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    // 'hat' escapes only through the weak persistent handle below.
    const char* source =
        " (function() {"
        "   function hat() { this.x = 5; }"
        "   createObj(hat);"
        "   createObj(hat);"
        "   return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had its weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
      createObj->shared()->feedback_vector(), CcTest::i_isolate());
  // Repeated GCs may be needed before the feedback weak cell is cleared.
  for (int i = 0; i < 20; i++) {
    Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
    CHECK(slot_value->IsWeakCell());
    if (WeakCell::cast(slot_value)->cleared()) break;
    heap->CollectAllGarbage();
  }

  Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
}
5121
5122
// Runs |source|, holds its result only via a weak persistent handle, forces a
// full GC, and checks that the weak callback fired — i.e. the value was not
// kept alive by any IC or other strong reference.
void CheckWeakness(const char* source) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
5144
5145
// Each of the following "weak IC" tests creates an IC that embeds a map with
// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
TEST(WeakMapInMonomorphicLoadIC) {
  // Three loads with the same receiver map exercise a monomorphic load IC.
  CheckWeakness("function loadIC(obj) {"
                " return obj.name;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   return proto;"
                " })();");
}
5161
5162
TEST(WeakMapInPolymorphicLoadIC) {
  // The extra receiver {poly} with a different map makes the load IC
  // polymorphic before weakness is checked.
  CheckWeakness(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   loadIC(poly);"
      "   return proto;"
      " })();");
}
5180
5181
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  // Three keyed loads with the same receiver map exercise a monomorphic
  // keyed-load IC.
  CheckWeakness("function keyedLoadIC(obj, field) {"
                "  return obj[field];"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   return proto;"
                " })();");
}
5195
5196
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  // The extra receiver {poly} with a different map makes the keyed-load IC
  // polymorphic before weakness is checked.
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      "  return obj[field];"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedLoadIC(poly, 'name');"
      "   return proto;"
      " })();");
}
5214
5215
TEST(WeakMapInMonomorphicStoreIC) {
  // Three stores with the same receiver map exercise a monomorphic store IC.
  CheckWeakness("function storeIC(obj, value) {"
                "  obj.name = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   return proto;"
                " })();");
}
5229
5230
TEST(WeakMapInPolymorphicStoreIC) {
  // The extra receiver {poly} with a different map makes the store IC
  // polymorphic before weakness is checked.
  CheckWeakness(
      "function storeIC(obj, value) {"
      "  obj.name = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   storeIC(poly, 'x');"
      "   return proto;"
      " })();");
}
5248
5249
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  // Three keyed stores with the same receiver map exercise a monomorphic
  // keyed-store IC.
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                "  obj[field] = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   return proto;"
                " })();");
}
5263
5264
TEST(WeakMapInPolymorphicKeyedStoreIC) {
  // The extra receiver {poly} with a different map makes the keyed-store IC
  // polymorphic before weakness is checked.
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      "  obj[field] = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedStoreIC(poly, 'x');"
      "   return proto;"
      " })();");
}
5282
5283
TEST(WeakMapInMonomorphicCompareNilIC) {
  // Three nil-comparisons with the same receiver map exercise a monomorphic
  // compare-nil IC.
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   return proto;"
                " })();");
}
5297
5298
5299Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5300 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5301 Handle<Object> obj =
5302 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5303 return Handle<JSFunction>::cast(obj);
5304}
5305
5306
5307void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5308 int slot_index, InlineCacheState state) {
5309 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5310 kind == Code::CALL_IC) {
5311 TypeFeedbackVector* vector = shared->feedback_vector();
5312 FeedbackVectorSlot slot(slot_index);
5313 if (kind == Code::LOAD_IC) {
5314 LoadICNexus nexus(vector, slot);
5315 CHECK_EQ(nexus.StateFromFeedback(), state);
5316 } else if (kind == Code::KEYED_LOAD_IC) {
5317 KeyedLoadICNexus nexus(vector, slot);
5318 CHECK_EQ(nexus.StateFromFeedback(), state);
5319 } else if (kind == Code::CALL_IC) {
5320 CallICNexus nexus(vector, slot);
5321 CHECK_EQ(nexus.StateFromFeedback(), state);
5322 }
5323 } else {
5324 Code* ic = FindFirstIC(code, kind);
5325 CHECK(ic->is_inline_cache_stub());
5326 CHECK(ic->ic_state() == state);
5327 }
5328}
5329
5330
// Verifies that a monomorphic load IC keeps its MONOMORPHIC feedback state
// across a full GC and across a subsequent re-run of the test function.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  // --always-opt changes IC behavior, invalidating the state checks below.
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "function testIC() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC() become unreachable
    // before the GC below.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
}
5362
5363
// Verifies that a polymorphic load IC keeps its POLYMORPHIC feedback state
// across a full GC and across a subsequent re-run of the test function.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  // --always-opt changes IC behavior, invalidating the state checks below.
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "function testIC() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      // A second map (own property 'x') drives the IC polymorphic.
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " loadIC(poly);"
      " return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
}
5398
5399
// Basic WeakCell semantics: scavenges never clear a weak cell; a full
// mark-compact clears a cell whose value is otherwise unreachable, but not
// one whose value is still strongly held.
TEST(WeakCell) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  HandleScope outer_scope(isolate);
  Handle<WeakCell> weak_cell1;
  {
    // The value handle dies with this scope, so only the weak cell refers
    // to the array afterwards.
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
    weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
  }

  // |survivor| stays strongly reachable through the outer scope.
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cell2;
  {
    HandleScope inner_scope(isolate);
    weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
  }
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // Two scavenges: neither cell may be cleared by new-space GCs.
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // Full GC: weak_cell1's value is unreachable and must be cleared;
  // weak_cell2 keeps its strongly-held value.
  heap->CollectAllAvailableGarbage();
  CHECK(weak_cell1->cleared());
  CHECK_EQ(*survivor, weak_cell2->value());
}
5432
5433
// Exercises weak cells while incremental marking is in progress: values must
// survive scavenges during marking, and after a full GC only the cell whose
// value is still strongly held (index 0) keeps its value.
TEST(WeakCellsWithIncrementalMarking) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  const int N = 16;
  HandleScope outer_scope(isolate);
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cells[N];

  for (int i = 0; i < N; i++) {
    HandleScope inner_scope(isolate);
    // Cell 0 wraps |survivor| (strongly held); the rest wrap arrays that
    // become unreachable when the inner scope closes.
    Handle<HeapObject> value =
        i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
    Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
    CHECK(weak_cell->value()->IsFixedArray());
    IncrementalMarking* marking = heap->incremental_marking();
    if (marking->IsStopped()) {
      heap->StartIncrementalMarking();
    }
    // Advance marking a little, then scavenge: the cell's value must not
    // be lost by the new-space GC.
    marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    heap->CollectGarbage(NEW_SPACE);
    CHECK(weak_cell->value()->IsFixedArray());
    weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
  }
  // Full GC clears every cell except the one whose value is still alive.
  heap->CollectAllGarbage();
  CHECK_EQ(*survivor, weak_cells[0]->value());
  for (int i = 1; i < N; i++) {
    CHECK(weak_cells[i]->cleared());
  }
}
5466
5467
5468#ifdef DEBUG
// Regression test: forces a GC (via allocation timeout) inside optimized
// code containing an add instruction, with stress compaction and allocation
// site pretenuring enabled, and checks that this does not crash.
TEST(AddInstructionChangesNewSpacePromotion) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  i::FLAG_stress_compaction = true;
  i::FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  // The scenario depends on allocation-site pretenuring being active.
  if (!i::FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      " return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      " var object = {a: null, b: null};"
      " var result = add(1.5, x | 0);"
      " object.a = result;"
      " oldSpaceObject = object;"
      " return object;"
      "}"
      // Warm up and optimize crash() so the add is inlined.
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  // Force the very next allocation inside crash() to trigger a GC.
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  heap->CollectAllGarbage();
}
5509
5510
// Fatal-error handler that terminates the process with status 0 exactly when
// the fatal error originated from CALL_AND_RETRY_LAST (the expected OOM
// location); any other location yields a non-zero exit status.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  const int location_mismatch = strcmp(location, "CALL_AND_RETRY_LAST");
  exit(location_mismatch);
}
5515
5516
// Regression test: running array builtins through the CEntry stub with a
// 1-allocation GC interval must either succeed or fail with the expected
// OOM location (checked by OnFatalErrorExpectOOM, which exits the process).
TEST(CEntryStubOOM) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  // unshift returns the new length, a number.
  CHECK(result->IsNumber());
}
5531
5532#endif // DEBUG
5533
5534
// No-op interrupt callback for Regress357137; only its invocation matters.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5536
5537
// JS-exposed "interrupt" function: requests an isolate interrupt that will
// run InterruptCallback357137 at the next check point.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
}
5541
5542
// Regression test for crbug.com/538257: aborting compaction near the old
// space limit must not corrupt the heap or OOM. Uses a dedicated isolate
// with tiny heap limits and forces all pages to be evacuation candidates.
UNINITIALIZED_TEST(Regress538257) {
  i::FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size(1 * Page::kPageSize / MB);
  create_params.constraints.set_max_old_space_size(6 * Page::kPageSize / MB);
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = i_isolate->heap()->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Fill old space until it can no longer expand, marking every page as a
    // forced evacuation candidate.
    for (int i = 0; (i < kMaxObjects) && old_space->CanExpand(Page::kPageSize);
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
      Page::FromAddress(objects[i]->address())
          ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
    }
    SimulateFullSpace(old_space);
    i_isolate->heap()->CollectGarbage(OLD_SPACE);
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
5573
5574
// Regression test for crbug.com/357137: a fake stack overflow triggered by
// an interrupt during compilation of a function with many locals must not
// break closure creation; the closure must still return the right value.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose RequestInterrupt to JS as "interrupt".
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
5596
5597
// Regression test for crbug.com/507979: shrinking an object while a
// filtering heap iterator is live creates a filler that shares mark bits
// with the following live object; iteration must still terminate cleanly.
TEST(Regress507979) {
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope handle_scope(isolate);

  // Two adjacent new-space arrays; o1 precedes o2.
  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(heap->InNewSpace(*o1));
  CHECK(heap->InNewSpace(*o2));

  HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(kFixedArrayLen - 1);

  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK(obj->address() != nullptr);
  }
}
5621
5622
// Verifies that the scavenger's promotion queue (which lives at the end of
// to-space) is evacuated rather than overwritten when an allocation grows
// into it. See the step-by-step scenario comment inside.
UNINITIALIZED_TEST(PromotionQueue) {
  i::FLAG_expose_gc = true;
  // Fix the semi-space size at exactly two pages.
  i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
  i::FLAG_min_semi_space_size = i::FLAG_max_semi_space_size;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    NewSpace* new_space = heap->new_space();

    // In this test we will try to overwrite the promotion queue which is at the
    // end of to-space. To actually make that possible, we need at least two
    // semi-space pages and take advantage of fragmentation.
    // (1) Use a semi-space consisting of two pages.
    // (2) Create a few small long living objects and call the scavenger to
    // move them to the other semi-space.
    // (3) Create a huge object, i.e., remainder of first semi-space page and
    // create another huge object which should be of maximum allocatable memory
    // size of the second semi-space page.
    // (4) Call the scavenger again.
    // What will happen is: the scavenger will promote the objects created in
    // (2) and will create promotion queue entries at the end of the second
    // semi-space page during the next scavenge when it promotes the objects to
    // the old generation. The first allocation of (3) will fill up the first
    // semi-space page. The second allocation in (3) will not fit into the
    // first semi-space page, but it will overwrite the promotion queue which
    // are in the second semi-space page. If the right guards are in place, the
    // promotion queue will be evacuated in that case.


    CHECK(new_space->IsAtMaximumCapacity());
    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());

    // Call the scavenger two times to get an empty new space
    heap->CollectGarbage(NEW_SPACE);
    heap->CollectGarbage(NEW_SPACE);

    // First create a few objects which will survive a scavenge, and will get
    // promoted to the old generation later on. These objects will create
    // promotion queue entries at the end of the second semi-space page.
    const int number_handles = 12;
    Handle<FixedArray> handles[number_handles];
    for (int i = 0; i < number_handles; i++) {
      handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    }

    heap->CollectGarbage(NEW_SPACE);
    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());

    // Fill-up the first semi-space page.
    FillUpOnePage(new_space);

    // Create a small object to initialize the bump pointer on the second
    // semi-space page.
    Handle<FixedArray> small =
        i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    CHECK(heap->InNewSpace(*small));

    // Fill-up the second semi-space page.
    FillUpOnePage(new_space);

    // This scavenge will corrupt memory if the promotion queue is not
    // evacuated.
    heap->CollectGarbage(NEW_SPACE);
  }
  isolate->Dispose();
}
5695
5696
// Regression test for crbug.com/388880: migrating the map of an object that
// sits exactly at the end of a page, while incremental marking is active,
// must not crash in Heap::AdjustLiveBytes().
TEST(Regress388880) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // map2 = map1 + one extra field; migrating map1 -> map2 grows the object.
  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<String> name = factory->NewStringFromStaticChars("foo");
  name = factory->InternalizeString(name);
  Handle<Map> map2 =
      Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE,
                         Representation::Tagged(), OMIT_TRANSITION)
          .ToHandleChecked();

  int desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so, that object allocated
  // afterwards would end at the end of the page.
  SimulateFullSpace(heap->old_space());
  int padding_size = desired_offset - Page::kObjectStartOffset;
  CreatePadding(heap, padding_size, TENURED);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
  o->set_properties(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromAddress(o->address());
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
5741
5742
// Regression test for v8:3631: growing a weak map's backing store after the
// old store was incrementally marked black must not lose entries or crash
// during the final GC.
TEST(Regress3631) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      " var key = {'k' : i + 0.1};"
      " weak_map.set(key, 1);"
      " future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking();
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
  // Step the marker until the weak map's table is black (or marking stops).
  while (!Marking::IsBlack(
             Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
         !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      " weak_map.set(future_keys[i], i);"
      "}");
  // Finish marking quickly and collect; must not crash.
  heap->incremental_marking()->set_should_hurry(true);
  heap->CollectGarbage(OLD_SPACE);
}
5781
5782
// Regression test for crbug.com/442710: Array.prototype.shift() on a global
// array followed by an old-space GC must not crash.
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<JSArray> array = factory->NewJSArray(2);

  // Publish the array as a global property so the script below can see it.
  Handle<String> name = factory->InternalizeUtf8String("testArray");
  JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  heap->CollectGarbage(OLD_SPACE);
}
5799
5800
// Test that the number-string cache has not been resized in the snapshot.
HEAP_TEST(NumberStringCacheSize) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // Only meaningful when starting from a snapshot.
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  // The cache stores key/value pairs, hence length == 2 * entry count.
  CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}
5810
5811
// Regression test for v8:3877: a constructor's prototype must stay alive as
// long as some object's map references it, and become collectable once the
// last such map is released.
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  // Observe cls.prototype's liveness through a weak cell.
  Handle<WeakCell> weak_prototype;
  {
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
  }
  CHECK(!weak_prototype->cleared());
  // Drop the direct reference; only a.x's map now points at the prototype.
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype->cleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  CHECK(weak_prototype->cleared());
}
5844
5845
// Creates a fresh map with a JS-allocated prototype, registers it with the
// heap's retained-maps list, and returns a weak cell observing the map so
// callers can detect when map retention expires.
Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSReceiver> proto =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Map::SetPrototype(map, proto);
  heap->AddRetainedMap(map);
  return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
}
5857
5858
// Verifies the --retain-maps-for-n-gc policy for a given n: an otherwise
// unreferenced map must survive exactly n full GCs and be cleared by the
// (n+1)-th.
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
  CHECK(!weak_cell->cleared());
  // n GCs: the map is still retained.
  for (int i = 0; i < n; i++) {
    SimulateIncrementalMarking(heap);
    heap->CollectGarbage(OLD_SPACE);
  }
  CHECK(!weak_cell->cleared());
  // One more GC: retention expires and the map is collected.
  SimulateIncrementalMarking(heap);
  heap->CollectGarbage(OLD_SPACE);
  CHECK(weak_cell->cleared());
}
5874
5875
// Exercises map retention for the default flag value and a few boundary
// values (0, 1, 7).
TEST(MapRetaining) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(1);
  CheckMapRetainingFor(7);
}
5884
5885
// Regression test: triggering a GC while the retained-maps ArrayList is
// being grown must not crash.
TEST(RegressArrayListGC) {
  FLAG_retain_maps_for_n_gc = 1;
  FLAG_incremental_marking = 0;
  FLAG_gc_global = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  AddRetainedMap(isolate, heap);
  Handle<Map> map = Map::Create(isolate, 1);
  heap->CollectGarbage(OLD_SPACE);
  // Force GC in old space on next addition of retained map.
  Map::WeakCellForMap(map);
  SimulateFullSpace(CcTest::heap()->new_space());
  for (int i = 0; i < 10; i++) {
    heap->AddRetainedMap(map);
  }
  heap->CollectGarbage(OLD_SPACE);
}
5905
5906
5907#ifdef DEBUG
// Smoke test for the debug-only heap path tracer: tracing the retention
// path to a live string must not crash.
TEST(PathTracer) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> result = CompileRun("'abc'");
  Handle<Object> o = v8::Utils::OpenHandle(*result);
  CcTest::i_isolate()->heap()->TracePathToObject(*o);
}
5916#endif // DEBUG
5917
5918
// Checks a heap invariant over all strong roots: no root may be both
// writable-after-initialization and immortal-immovable.
TEST(WritableVsImmortalRoots) {
  for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
    Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
    bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
    bool immortal = Heap::RootIsImmortalImmovable(root_index);
    // A root value can be writable, immortal, or neither, but not both.
    CHECK(!immortal || !writable);
  }
}
5928
5929
// Right-trims a freshly allocated fixed typed array of the given type and
// length by |elements_to_trim| elements and verifies that the array header
// survives and any freed tail is replaced by a filler object.
static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
                                         int initial_length,
                                         int elements_to_trim) {
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<FixedTypedArrayBase> array =
      factory->NewFixedTypedArray(initial_length, type, true);
  int old_size = array->size();
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
                                                         elements_to_trim);

  // Check that free space filler is at the right place and did not smash the
  // array header.
  CHECK(array->IsFixedArrayBase());
  CHECK_EQ(initial_length - elements_to_trim, array->length());
  int new_size = array->size();
  if (new_size != old_size) {
    // Free space filler should be created in this case.
    Address next_obj_address = array->address() + array->size();
    CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
  }
  heap->CollectAllAvailableGarbage();
}
5956
5957
// Regression test for crbug.com/472513: specific right-trim sizes used to
// smash the typed array header when writing the free-space filler.
TEST(Regress472513) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // The combination of type/initial_length/elements_to_trim triggered
  // typed array header smashing with free space filler (crbug/472513).

  // 64-bit cases.
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);

  // 32-bit cases.
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
}
5980
5981
// Smoke test of the WeakFixedArray API: Add (including with a null array
// handle), Remove, Compact and re-Add must not crash.
TEST(WeakFixedArray) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
  // Adding to an empty handle allocates a fresh array.
  Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
  array->Remove(number);
  array->Compact<WeakFixedArray::NullCallback>();
  WeakFixedArray::Add(array, number);
}
5992
5993
// Verifies that GC preprocessing of captured stack traces replaces raw Code
// objects with position Smis, so no stack trace entry retains Code.
TEST(PreprocessStackTrace) {
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      JSObject::GetProperty(exception, key).ToHandleChecked();
  // Before GC, element 3 of the raw trace holds a Code object.
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");

  // After GC, the Code object has been replaced by a source position Smi.
  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  // No element of the preprocessed trace may still be a Code object.
  Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
  int array_length = Smi::cast(stack_trace_array->length())->value();
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}
6025
6026
// Set once the weak callback below fires for the natives "utils" object.
static bool utils_has_been_collected = false;

// Weak callback: records that the utils object was garbage-collected and
// resets the persistent handle passed as the callback parameter.
static void UtilsHasBeenCollected(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  utils_has_been_collected = true;
  data.GetParameter()->Reset();
}
6034
6035
// Expose utils object and delete it to observe that it is indeed
// being garbage-collected.
TEST(BootstrappingExports) {
  FLAG_expose_natives_as = "utils";
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;

  // With a snapshot the natives object is not exposed; nothing to test.
  if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;

  utils_has_been_collected = false;

  v8::Persistent<v8::Object> utils;

  {
    // Grab a persistent handle to the utils object, then delete the global
    // property so the persistent handle is its last reference.
    v8::HandleScope scope(isolate);
    v8::Local<v8::String> name = v8_str("utils");
    utils.Reset(isolate, CcTest::global()
                             ->Get(env.local(), name)
                             .ToLocalChecked()
                             ->ToObject(env.local())
                             .ToLocalChecked());
    CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
  }

  utils.SetWeak(&utils, UtilsHasBeenCollected,
                v8::WeakCallbackType::kParameter);

  CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");

  CHECK(utils_has_been_collected);
}
6068
6069
// Regression test for v8:1878: instances of the internal array constructor
// must not share maps with user-visible arrays and must keep fast object
// elements even under allocation pressure.
TEST(Regress1878) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  // Expose the internal Array constructor to the test script.
  v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
      CcTest::i_isolate()->internal_array_function());
  LocalContext env;
  CHECK(CcTest::global()
            ->Set(env.local(), v8_str("InternalArray"), constructor)
            .FromJust());

  v8::TryCatch try_catch(isolate);

  CompileRun(
      "var a = Array();"
      "for (var i = 0; i < 1000; i++) {"
      " var ai = new InternalArray(10000);"
      " if (%HaveSameMap(ai, a)) throw Error();"
      " if (!%HasFastObjectElements(ai)) throw Error();"
      "}"
      "for (var i = 0; i < 1000; i++) {"
      " var ai = new InternalArray(10000);"
      " if (%HaveSameMap(ai, a)) throw Error();"
      " if (!%HasFastObjectElements(ai)) throw Error();"
      "}");

  CHECK(!try_catch.HasCaught());
}
6099
6100
6101void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6102 CHECK(bytes >= FixedArray::kHeaderSize);
6103 CHECK(bytes % kPointerSize == 0);
6104 Factory* factory = isolate->factory();
6105 HandleScope scope(isolate);
6106 AlwaysAllocateScope always_allocate(isolate);
6107 int elements =
6108 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6109 Handle<FixedArray> array = factory->NewFixedArray(
6110 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6111 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6112 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6113}
6114
6115
// Verifies that the new-space allocation counter advances by exactly the
// allocated size, is unaffected by scavenges, and survives wrap-around.
TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  // A scavenge allocates nothing in new space; the counter must not move.
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    // Unsigned subtraction keeps the delta correct across the wrap.
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}
6141
6142
// Verifies that the old-generation allocation counter advances by at least
// the allocated size, is unaffected by scavenges without promotion, and
// survives wrap-around.
TEST(OldSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    // Unsigned subtraction keeps the delta correct across the wrap.
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}
6174
6175
// Feeds synthetic (time, counter) samples to the GC tracer and checks that
// the reported new-space throughput equals bytes-delta / time-delta over
// the sampled window.
TEST(NewSpaceAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  // Throughput is computed over the whole window since the first sample.
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}
6197
6198
6199TEST(NewSpaceAllocationThroughput2) {
6200 CcTest::InitializeVM();
6201 v8::HandleScope scope(CcTest::isolate());
6202 Isolate* isolate = CcTest::i_isolate();
6203 Heap* heap = isolate->heap();
6204 GCTracer* tracer = heap->tracer();
6205 int time1 = 100;
6206 size_t counter1 = 1000;
6207 tracer->SampleAllocation(time1, counter1, 0);
6208 int time2 = 200;
6209 size_t counter2 = 2000;
6210 tracer->SampleAllocation(time2, counter2, 0);
6211 size_t throughput =
6212 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6213 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6214 int time3 = 1000;
6215 size_t counter3 = 30000;
6216 tracer->SampleAllocation(time3, counter3, 0);
6217 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6218 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6219}
6220
6221
// Callback installed as the JS function check(): asserts that the isolate's
// pending-message slot holds the hole, i.e. no message object leaked past
// the point where the exception was handled.
static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  CHECK(message->IsTheHole());
}
6228
6229
6230TEST(MessageObjectLeak) {
6231 CcTest::InitializeVM();
6232 v8::Isolate* isolate = CcTest::isolate();
6233 v8::HandleScope scope(isolate);
6234 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6235 global->Set(
6236 v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
6237 .ToLocalChecked(),
6238 v8::FunctionTemplate::New(isolate, CheckLeak));
6239 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6240 v8::Context::Scope cscope(context);
6241
6242 const char* test =
6243 "try {"
6244 " throw 'message 1';"
6245 "} catch (e) {"
6246 "}"
6247 "check();"
6248 "L: try {"
6249 " throw 'message 2';"
6250 "} finally {"
6251 " break L;"
6252 "}"
6253 "check();";
6254 CompileRun(test);
6255
6256 const char* flag = "--turbo-filter=*";
6257 FlagList::SetFlagsFromString(flag, StrLength(flag));
6258 FLAG_always_opt = true;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00006259
6260 CompileRun(test);
6261}
6262
6263
6264static void CheckEqualSharedFunctionInfos(
6265 const v8::FunctionCallbackInfo<v8::Value>& args) {
6266 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
6267 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
6268 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
6269 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
6270 CHECK(fun1->shared() == fun2->shared());
6271}
6272
6273
// Callback installed as the JS function remove(f): resets |f| and its
// SharedFunctionInfo back to the lazy-compile stub, then runs a full GC so
// the discarded code can be collected; the next call to |f| recompiles it.
static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  // Both the closure and the shared info must be reset, otherwise the old
  // code would be reinstalled on the next call.
  fun->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
  isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
}
6282
6283
// Checks that recompiling a function after its code has been removed (via
// the remove() callback, i.e. RemoveCodeAndGC) produces closures that still
// share the canonical SharedFunctionInfo, both for a directly returned
// closure and for one created inside a nested IIFE.
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  // g1 and g2 are created before and after f's code removal; they must
  // still share one SharedFunctionInfo.
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  // Same check with the closure produced by an immediately-invoked inner
  // function.
  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}
6309
6310
6311TEST(OldGenerationAllocationThroughput) {
6312 CcTest::InitializeVM();
6313 v8::HandleScope scope(CcTest::isolate());
6314 Isolate* isolate = CcTest::i_isolate();
6315 Heap* heap = isolate->heap();
6316 GCTracer* tracer = heap->tracer();
6317 int time1 = 100;
6318 size_t counter1 = 1000;
6319 tracer->SampleAllocation(time1, 0, counter1);
6320 int time2 = 200;
6321 size_t counter2 = 2000;
6322 tracer->SampleAllocation(time2, 0, counter2);
6323 size_t throughput =
6324 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6325 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6326 int time3 = 1000;
6327 size_t counter3 = 30000;
6328 tracer->SampleAllocation(time3, 0, counter3);
6329 throughput =
6330 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6331 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6332}
6333
6334
6335TEST(AllocationThroughput) {
6336 CcTest::InitializeVM();
6337 v8::HandleScope scope(CcTest::isolate());
6338 Isolate* isolate = CcTest::i_isolate();
6339 Heap* heap = isolate->heap();
6340 GCTracer* tracer = heap->tracer();
6341 int time1 = 100;
6342 size_t counter1 = 1000;
6343 tracer->SampleAllocation(time1, counter1, counter1);
6344 int time2 = 200;
6345 size_t counter2 = 2000;
6346 tracer->SampleAllocation(time2, counter2, counter2);
6347 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6348 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6349 int time3 = 1000;
6350 size_t counter3 = 30000;
6351 tracer->SampleAllocation(time3, counter3, counter3);
6352 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6353 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
6354}
6355
6356
// Sanity-checks ContextMeasure on the native context: its reported size and
// object count must be non-trivially large, yet bounded above by the totals
// of a raw walk over every object on the heap.
TEST(ContextMeasure) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;

  // Upper bounds: the context cannot reference more bytes/objects than
  // currently exist on the whole heap.
  int size_upper_limit = 0;
  int count_upper_limit = 0;
  HeapIterator it(CcTest::heap());
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    size_upper_limit += obj->Size();
    count_upper_limit++;
  }

  ContextMeasure measure(*isolate->native_context());

  PrintF("Context size : %d bytes\n", measure.Size());
  PrintF("Context object count: %d\n", measure.Count());

  // Lower bounds for a freshly initialized context.
  CHECK_LE(1000, measure.Count());
  CHECK_LE(50000, measure.Size());

  CHECK_LE(measure.Count(), count_upper_limit);
  CHECK_LE(measure.Size(), size_upper_limit);
}
6382
6383
6384TEST(ScriptIterator) {
6385 CcTest::InitializeVM();
6386 v8::HandleScope scope(CcTest::isolate());
6387 Isolate* isolate = CcTest::i_isolate();
6388 Heap* heap = CcTest::heap();
6389 LocalContext context;
6390
6391 heap->CollectAllGarbage();
6392
6393 int script_count = 0;
6394 {
6395 HeapIterator it(heap);
6396 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6397 if (obj->IsScript()) script_count++;
6398 }
6399 }
6400
6401 {
6402 Script::Iterator iterator(isolate);
6403 while (iterator.Next()) script_count--;
6404 }
6405
6406 CHECK_EQ(0, script_count);
6407}
6408
6409
// Checks that SharedFunctionInfo::Iterator visits exactly the set of
// SharedFunctionInfo objects that a raw walk over the heap finds.
TEST(SharedFunctionInfoIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  // Two full GCs before counting, presumably to flush out dead
  // SharedFunctionInfos — TODO confirm why one is not sufficient.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();

  // Count SharedFunctionInfos via a raw heap walk.
  int sfi_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (!obj->IsSharedFunctionInfo()) continue;
      sfi_count++;
    }
  }

  // Every object the dedicated iterator yields cancels one from the walk.
  {
    SharedFunctionInfo::Iterator iterator(isolate);
    while (iterator.Next()) sfi_count--;
  }

  CHECK_EQ(0, sfi_count);
}
6436
6437
// Derives a UniqueId from the address of the object behind a persistent
// handle, so object groups can be keyed by object identity.
template <typename T>
static UniqueId MakeUniqueId(const Persistent<T>& p) {
  return UniqueId(reinterpret_cast<uintptr_t>(*v8::Utils::OpenPersistent(p)));
}
6442
6443
// Regression test for issue 519319: registering object groups and then
// starting/finalizing incremental marking while sweeper threads from the
// previous GC are still running used to crash.
TEST(Regress519319) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  Heap* heap = CcTest::heap();
  LocalContext context;

  v8::Persistent<Value> parent;
  v8::Persistent<Value> child;

  parent.Reset(isolate, v8::Object::New(isolate));
  child.Reset(isolate, v8::Object::New(isolate));

  // Fill old space and collect it so that concurrent sweeping is in flight.
  SimulateFullSpace(heap->old_space());
  heap->CollectGarbage(OLD_SPACE);
  // Register an object group with a reference from parent's group to child.
  {
    UniqueId id = MakeUniqueId(parent);
    isolate->SetObjectGroupId(parent, id);
    isolate->SetReferenceFromGroup(id, child);
  }
  // The CollectGarbage call above starts sweeper threads.
  // The crash will happen if the following two functions
  // are called before sweeping finishes.
  heap->StartIncrementalMarking();
  heap->FinalizeIncrementalMarkingIfComplete("test");
}
6470
6471
// Checks MemoryReducer's sampling of the API JS-call counter: each call to
// SampleAndGetJsCallsPerMs(t) returns (calls since previous sample) divided
// by (milliseconds since previous sample).
HEAP_TEST(TestMemoryReducerSampleJsCalls) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  MemoryReducer* memory_reducer = heap->memory_reducer_;
  // Baseline sample at time 0 ms.
  memory_reducer->SampleAndGetJsCallsPerMs(0);
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  // 3 calls in 1 ms -> 3 calls/ms.
  double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
  CheckDoubleEquals(3, calls_per_ms);

  // No calls between t=1 ms and t=2 ms -> 0 calls/ms.
  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
  CheckDoubleEquals(0, calls_per_ms);

  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  // 4 calls in 2 ms -> 2 calls/ms.
  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
  CheckDoubleEquals(2, calls_per_ms);
}
6495
Ben Murdoch097c5b22016-05-18 11:27:45 +01006496HEAP_TEST(Regress587004) {
6497 FLAG_concurrent_sweeping = false;
6498#ifdef VERIFY_HEAP
6499 FLAG_verify_heap = false;
6500#endif
6501 CcTest::InitializeVM();
6502 v8::HandleScope scope(CcTest::isolate());
6503 Heap* heap = CcTest::heap();
6504 Isolate* isolate = CcTest::i_isolate();
6505 Factory* factory = isolate->factory();
6506 const int N = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
6507 kPointerSize;
6508 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
6509 CHECK(heap->old_space()->Contains(*array));
6510 Handle<Object> number = factory->NewHeapNumber(1.0);
6511 CHECK(heap->InNewSpace(*number));
6512 for (int i = 0; i < N; i++) {
6513 array->set(i, *number);
6514 }
6515 heap->CollectGarbage(OLD_SPACE);
6516 SimulateFullSpace(heap->old_space());
6517 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
6518 heap->mark_compact_collector()->EnsureSweepingCompleted();
6519 ByteArray* byte_array;
6520 const int M = 256;
6521 // Don't allow old space expansion. The test works without this flag too,
6522 // but becomes very slow.
6523 heap->set_force_oom(true);
6524 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
6525 for (int j = 0; j < M; j++) {
6526 byte_array->set(j, 0x31);
6527 }
6528 }
6529 // Re-enable old space expansion to avoid OOM crash.
6530 heap->set_force_oom(false);
6531 heap->CollectGarbage(NEW_SPACE);
6532}
6534} // namespace internal
6535} // namespace v8