// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>
#include <utility>

#include "src/compilation-cache.h"
#include "src/context-measure.h"
#include "src/deoptimizer.h"
#include "src/elements.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/field-type.h"
#include "src/global-handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/memory-reducer.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/regexp/jsregexp.h"
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/heap-utils.h"
#include "test/cctest/test-feedback-vector.h"


namespace v8 {
namespace internal {

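// Every heap object carries a Map describing its type and layout. Maps are
// themselves heap objects, and the chain terminates at the meta map, which is
// its own map; CheckMap below verifies exactly that invariant.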
static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}


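// The helpers below round-trip values through Object::ToString() and compare
// the result against an expected literal, covering the oddball fast path,
// Smi-to-string conversion, and full double-to-string conversion.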
static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckSmi(Isolate* isolate, int value, const char* string) {
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Object::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
}


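// Isolate::FindCodeObject() maps an arbitrary interior address back to the
// Code object containing it. The helper probes every pointer-aligned offset
// inside a freshly allocated stub to confirm the reverse lookup, and one
// offset inside a second stub to confirm it does not resolve to the first.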
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != *code);
}


TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(static_cast<Object*>(nullptr), isolate);
  CHECK(!n.is_null());
}


TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // NaN oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}


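// CheckSimdValue verifies per-lane independence of the boxed SIMD values:
// every lane must read back its initial value, and rewriting one lane must
// leave all of the others untouched.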
template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
                           LANE_TYPE other_value) {
  // Check against lane_values, and check that all lanes can be set to
  // other_value without disturbing the other lanes.
  for (int i = 0; i < LANES; i++) {
    CHECK_EQ(lane_values[i], value->get_lane(i));
  }
  for (int i = 0; i < LANES; i++) {
    value->set_lane(i, other_value);  // change the value
    for (int j = 0; j < LANES; j++) {
      if (i != j)
        CHECK_EQ(lane_values[j], value->get_lane(j));
      else
        CHECK_EQ(other_value, value->get_lane(j));
    }
    value->set_lane(i, lane_values[i]);  // restore the lane
  }
  CHECK(value->BooleanValue());  // SIMD values are 'true'.
}


TEST(SimdObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  // Float32x4
  {
    float lanes[4] = {1, 2, 3, 4};
    float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
    float signaling_NaN = std::numeric_limits<float>::signaling_NaN();

    Handle<Float32x4> value = factory->NewFloat32x4(lanes);
    CHECK(value->IsFloat32x4());
    CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);

    // Check special lane values.
    value->set_lane(1, -0.0);
    CHECK_EQ(-0.0f, value->get_lane(1));
    CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
    value->set_lane(2, quiet_NaN);
    CHECK(std::isnan(value->get_lane(2)));
    value->set_lane(3, signaling_NaN);
    CHECK(std::isnan(value->get_lane(3)));

#ifdef OBJECT_PRINT
    // Check value printing.
    {
      value = factory->NewFloat32x4(lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      CHECK_EQ("1, 2, 3, 4", os.str());
    }
    {
      float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
      value = factory->NewFloat32x4(special_lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      // Value printing doesn't preserve signed zeroes.
      CHECK_EQ("0, 0, NaN, NaN", os.str());
    }
#endif  // OBJECT_PRINT
  }
  // Int32x4
  {
    int32_t lanes[4] = {1, 2, 3, 4};

    Handle<Int32x4> value = factory->NewInt32x4(lanes);
    CHECK(value->IsInt32x4());
    CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint32x4
  {
    uint32_t lanes[4] = {1, 2, 3, 4};

    Handle<Uint32x4> value = factory->NewUint32x4(lanes);
    CHECK(value->IsUint32x4());
    CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool32x4
  {
    bool lanes[4] = {true, false, true, false};

    Handle<Bool32x4> value = factory->NewBool32x4(lanes);
    CHECK(value->IsBool32x4());
    CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool32x4Print(os);
    CHECK_EQ("true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int16x8
  {
    int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Int16x8> value = factory->NewInt16x8(lanes);
    CHECK(value->IsInt16x8());
    CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint16x8
  {
    uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Uint16x8> value = factory->NewUint16x8(lanes);
    CHECK(value->IsUint16x8());
    CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool16x8
  {
    bool lanes[8] = {true, false, true, false, true, false, true, false};

    Handle<Bool16x8> value = factory->NewBool16x8(lanes);
    CHECK(value->IsBool16x8());
    CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool16x8Print(os);
    CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int8x16
  {
    int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Int8x16> value = factory->NewInt8x16(lanes);
    CHECK(value->IsInt8x16());
    CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint8x16
  {
    uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Uint8x16> value = factory->NewUint8x16(lanes);
    CHECK(value->IsUint8x16());
    CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool8x16
  {
    bool lanes[16] = {true, false, true, false, true, false, true, false,
                      true, false, true, false, true, false, true, false};

    Handle<Bool8x16> value = factory->NewBool8x16(lanes);
    CHECK(value->IsBool8x16());
    CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool8x16Print(os);
    CHECK_EQ(
        "true, false, true, false, true, false, true, false, true, false, "
        "true, false, true, false, true, false",
        os.str());
#endif  // OBJECT_PRINT
  }
}


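// Background for TEST(Tagging): Smis are stored directly in the tagged word.
// The low bit distinguishes the two cases (0 for Smi, 1 for heap object), so
// on 32-bit targets Smi::FromInt(42) is the word 42 << 1 == 84, while 64-bit
// targets keep the 32-bit payload in the upper half of the word.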
TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}


TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After GC, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}


static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
}


TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}


TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // After GC, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}


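// The weak-handle tests below share TestWeakGlobalHandleCallback: it receives
// the std::pair registered with MakeWeak, checks the 1234 cookie, resets the
// handle, and records the event in WeakPointerCleared for later assertions.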
static bool WeakPointerCleared = false;

static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackInfo<void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}


TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}


TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage();

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}


TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback,
                          v8::WeakCallbackType::kParameter);

  // Scavenge does not recognize weak references.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak references properly.
  heap->CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}

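// During incremental marking an object is white (not yet reached), grey
// (reached but not fully scanned), or black (fully scanned). The two tests
// below check that a scavenge keeps white objects in new space and only
// promotes objects the marker has already reached.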
TEST(DoNotPromoteWhiteObjectsOnScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope scope(isolate);
  Handle<Object> white = factory->NewStringFromStaticChars("white");

  CHECK(Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(*white))));

  heap->CollectGarbage(NEW_SPACE);

  CHECK(heap->InNewSpace(*white));
}

TEST(PromoteGreyOrBlackObjectsOnScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope scope(isolate);
  Handle<Object> marked = factory->NewStringFromStaticChars("marked");

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking();
  while (Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(*marked)))) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  IncrementalMarking::FORCE_MARKING,
                  IncrementalMarking::DO_NOT_FORCE_COMPLETION);
  }

  heap->CollectGarbage(NEW_SPACE);

  CHECK(!heap->InNewSpace(*marked));
}

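// A BytecodeArray stores its raw bytecodes inline after a fixed header and
// holds a tagged pointer to its constant pool. The test below validates the
// accessors against that layout, then forces the constant pool onto an
// evacuation candidate to prove the pointer is updated when the pool moves.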
TEST(BytecodeArray) {
  static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
  static const int kRawBytesSize = sizeof(kRawBytes);
  static const int kFrameSize = 32;
  static const int kParameterCount = 2;

  i::FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);

  heap::SimulateFullSpace(heap->old_space());
  Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
  for (int i = 0; i < 5; i++) {
    Handle<Object> number = factory->NewHeapNumber(i);
    constant_pool->set(i, *number);
  }

  // Allocate and initialize BytecodeArray
  Handle<BytecodeArray> array = factory->NewBytecodeArray(
      kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);

  CHECK(array->IsBytecodeArray());
  CHECK_EQ(array->length(), static_cast<int>(sizeof(kRawBytes)));
  CHECK_EQ(array->frame_size(), kFrameSize);
  CHECK_EQ(array->parameter_count(), kParameterCount);
  CHECK_EQ(array->constant_pool(), *constant_pool);
  CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
  CHECK_GE(array->address() + array->BytecodeArraySize(),
           array->GetFirstBytecodeAddress() + array->length());
  for (int i = 0; i < kRawBytesSize; i++) {
    CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
    CHECK_EQ(array->get(i), kRawBytes[i]);
  }

  FixedArray* old_constant_pool_address = *constant_pool;

  // Perform a full garbage collection and force the constant pool to be on an
  // evacuation candidate.
  Page* evac_page = Page::FromAddress(constant_pool->address());
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap->CollectAllGarbage();

  // BytecodeArray should survive.
  CHECK_EQ(array->length(), kRawBytesSize);
  CHECK_EQ(array->frame_size(), kFrameSize);
  for (int i = 0; i < kRawBytesSize; i++) {
    CHECK_EQ(array->get(i), kRawBytes[i]);
    CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
  }

  // Constant pool should have been migrated.
  CHECK_EQ(array->constant_pool(), *constant_pool);
  CHECK_NE(array->constant_pool(), old_constant_pool_address);
}


static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};


static void CheckInternalizedStrings(const char** strings) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    HandleScope scope(isolate);
    Handle<String> a =
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
    CHECK(a->IsInternalizedString());
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
  }
}


TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}


TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}


TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}


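// JSObjectMaps exercises hidden-class transitions: adding a property replaces
// the object's Map with a transitioned one, so the map observed after the
// store must differ from the constructor's initial map.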
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}


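// A length beyond Smi::kMaxValue cannot be indexed through fast Smi-keyed
// elements, so JSArray::SetLength falls back to dictionary (slow) elements;
// the test below covers both the fast and the slow regime.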
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}


TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
  JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
  JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();

  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}


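// The bytes 0xe5 0xa4 0xa7 below are the UTF-8 encoding of a single code
// point (U+5927), so each logical character consumes three input bytes but
// only one element of the resulting string.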
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}


static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan the heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}


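// The code-flushing tests below rely on code aging: compiled code that is not
// executed is made progressively older across GCs until the collector drops
// it, leaving the function to be lazily recompiled on its next call.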
UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}


TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GCs now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined(CcTest::i_isolate()))
      break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  heap::SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  heap::SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  EnableDebugger(CcTest::isolate());
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
  DisableDebugger(CcTest::isolate());

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}

TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate->global_object(), f_name).ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  CHECK(!g_function->is_compiled());

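  // Start incremental marking, then produce optimized code that the marker
  // has not yet seen.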
  heap::SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}

TEST(CompilationCacheCachingBehavior) {
  // If we do not flush code, or have the compilation cache turned off, this
  // test is invalid.
  if (!FLAG_flush_code || !FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // The script should be in the cache now.
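  // Note: the lookup uses the full cache key: source, (empty) script name,
  // position (0, 0), origin options, native context, and language mode.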
  MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  // Check that the code cache entry survives at least one GC.
  // (Unless --optimize-for-size, in which case it might get collected
  // immediately.)
  if (!FLAG_optimize_for_size) {
    heap->CollectAllGarbage();
    info = compilation_cache->LookupScript(
        source, Handle<Object>(), 0, 0,
        v8::ScriptOriginOptions(false, true, false), native_context,
        language_mode);
    CHECK(!info.is_null());
  }

  // Progress code age until it's old and ready for GC.
  while (!info.ToHandleChecked()->code()->IsOld()) {
    // To guarantee progress, we have to MakeOlder with different parities.
    // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
    // always NO_MARKING_PARITY and the code age only progresses if the parity
    // is different.
    info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
    info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
  }

  heap->CollectAllGarbage();
  // Ensure code aging cleared the entry from the cache.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());
}


static void OptimizeEmptyFunction(const char* name) {
  HandleScope scope(CcTest::i_isolate());
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();",
           name, name, name, name, name);
  CompileRun(source.start());
}


// Count the number of native contexts in the weak list of native contexts.
int CountNativeContexts() {
  int count = 0;
  Object* object = CcTest::heap()->native_contexts_list();
  while (!object->IsUndefined(CcTest::i_isolate())) {
    count++;
    object = Context::cast(object)->next_context_link();
  }
  return count;
}


// Count the number of user functions in the weak list of optimized
// functions attached to a native context.
static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  while (object->IsJSFunction() &&
         !JSFunction::cast(object)->shared()->IsBuiltin()) {
    count++;
    object = JSFunction::cast(object)->next_function_link();
  }
  return count;
}


TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f1");
    CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f2");
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f3");
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f4");
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f5");
    CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));

    // Remove function f1.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::heap()->CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}


// Count the number of native contexts in the weak list of native contexts,
// causing a GC after the specified number of elements.
static int CountNativeContextsWithGC(Isolate* isolate, int n) {
  Heap* heap = isolate->heap();
  int count = 0;
  Handle<Object> object(heap->native_contexts_list(), isolate);
  while (!object->IsUndefined(isolate)) {
    count++;
    if (count == n) heap->CollectAllGarbage();
    object =
        Handle<Object>(Context::cast(*object)->next_context_link(), isolate);
  }
  return count;
}


// Count the number of user functions in the weak list of optimized
// functions attached to a native context, causing a GC after the
// specified number of elements.
static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
                                             int n) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Isolate* isolate = icontext->GetIsolate();
  Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
                        isolate);
  while (object->IsJSFunction() &&
         !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
    count++;
    if (count == n) isolate->heap()->CollectAllGarbage();
    object = Handle<Object>(
        Object::cast(JSFunction::cast(*object)->next_function_link()),
        isolate);
  }
  return count;
}


TEST(TestInternalWeakListsTraverseWithGC) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  ctx[0]->Enter();

  // Compile a number of functions and check the length of the weak list of
  // optimized functions both with and without GCs while iterating the list.
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  OptimizeEmptyFunction("f1");
  CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f2");
  CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f3");
  CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f4");
  CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  OptimizeEmptyFunction("f5");
  CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}


TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust the source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp whose code is much larger if we are using regexp
  // optimizations.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // The small regexp is half the size, but compiles to more than twice the
  // code due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}


HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
    }
  }

  // The heap size should go back to the initial size after a full GC, even
  // though sweeping may not have finished yet.
  CcTest::heap()->CollectAllGarbage();

  // Normally sweeping would not be complete here, but there are no guarantees.

  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));

  // Waiting for sweeper threads should not change the heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
}


TEST(TestAlignmentCalculations) {
  // Maximum fill amounts are consistent.
  int maximum_double_misalignment = kDoubleSize - kPointerSize;
  int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
  int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
  CHECK_EQ(0, max_word_fill);
  int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, max_double_fill);
  int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
  int max_simd128_unaligned_fill =
      Heap::GetMaximumFillToAlign(kSimd128Unaligned);
  CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);

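  // Worked example for a 32-bit target (kPointerSize == 4, kDoubleSize == 8):
  // an address at base is already double aligned and needs no fill, while
  // base + 4 needs 4 bytes of fill, which is what the checks below assert.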
  Address base = static_cast<Address>(NULL);
  int fill = 0;

  // Word alignment never requires fill.
  fill = Heap::GetFillToAlign(base, kWordAligned);
  CHECK_EQ(0, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
  CHECK_EQ(0, fill);

  // No fill is required when the address is double aligned.
  fill = Heap::GetFillToAlign(base, kDoubleAligned);
  CHECK_EQ(0, fill);
  // Fill is required if the address is not double aligned.
  fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  // kDoubleUnaligned has the opposite fill amounts.
  fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
  CHECK_EQ(0, fill);

  // 128-bit SIMD types have 2 or 4 possible alignments, depending on platform.
  fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
  CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
  fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
  CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
  fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
  CHECK_EQ(kPointerSize, fill);
  fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
  CHECK_EQ(0, fill);
}


static HeapObject* NewSpaceAllocateAligned(int size,
                                           AllocationAlignment alignment) {
  Heap* heap = CcTest::heap();
  AllocationResult allocation =
      heap->new_space()->AllocateRawAligned(size, alignment);
  HeapObject* obj = NULL;
  allocation.To(&obj);
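  // Overwrite the raw allocation with a filler object so that the space
  // remains iterable; the alignment tests only inspect addresses and fillers.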
  heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
  return obj;
}


// Get new space allocation into the desired alignment.
static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  int fill = Heap::GetFillToAlign(*top_addr, alignment);
  if (fill) {
    NewSpaceAllocateAligned(fill + offset, kWordAligned);
  }
  return *top_addr;
}


TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
  const intptr_t double_misalignment = kDoubleSize - kPointerSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject* obj;
  HeapObject* filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kPointerSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned.
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    CHECK_EQ(kPointerSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
  }

  // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
  // on platform.
  start = AlignNewSpace(kSimd128Unaligned, 0);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is no filler.
  CHECK_EQ(kPointerSize, *top_addr - start);
  start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is a filler object before the object.
  filler = HeapObject::FromAddress(start);
  CHECK(obj != filler && filler->IsFiller() &&
        filler->Size() == kSimd128Size - kPointerSize);
  CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);

  if (double_misalignment) {
    // Test the 2 other alignments possible on 32 bit platforms.
    start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == 2 * kPointerSize);
    CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
    start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
  }
}


static HeapObject* OldSpaceAllocateAligned(int size,
                                           AllocationAlignment alignment) {
  Heap* heap = CcTest::heap();
  AllocationResult allocation =
      heap->old_space()->AllocateRawAligned(size, alignment);
  HeapObject* obj = NULL;
  allocation.To(&obj);
  heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
  return obj;
}


// Get old space allocation into the desired alignment.
static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
  Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
  int fill = Heap::GetFillToAlign(*top_addr, alignment);
  int allocation = fill + offset;
  if (allocation) {
    OldSpaceAllocateAligned(allocation, kWordAligned);
  }
  Address top = *top_addr;
  // Now force the remaining allocation onto the free list.
  CcTest::heap()->old_space()->EmptyAllocationInfo();
  return top;
}


// Test the case where allocation must be done from the free list, so filler
// may precede or follow the object.
TEST(TestAlignedOverAllocation) {
  // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
  const intptr_t double_misalignment = kDoubleSize - kPointerSize;
  Address start;
  HeapObject* obj;
  HeapObject* filler1;
  HeapObject* filler2;
  if (double_misalignment) {
    start = AlignOldSpace(kDoubleAligned, 0);
    obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    // The object is aligned, and a filler object is created after.
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    filler1 = HeapObject::FromAddress(start + kPointerSize);
    CHECK(obj != filler1 && filler1->IsFiller() &&
          filler1->Size() == kPointerSize);
    // Try the opposite alignment case.
    start = AlignOldSpace(kDoubleAligned, kPointerSize);
    obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    filler1 = HeapObject::FromAddress(start);
    CHECK(obj != filler1 && filler1->IsFiller() &&
          filler1->Size() == kPointerSize);

    // Similarly for kDoubleUnaligned.
    start = AlignOldSpace(kDoubleUnaligned, 0);
    obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    // The object is aligned, and a filler object is created after.
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    filler1 = HeapObject::FromAddress(start + kPointerSize);
    CHECK(obj != filler1 && filler1->IsFiller() &&
          filler1->Size() == kPointerSize);
    // Try the opposite alignment case.
    start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
    obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    filler1 = HeapObject::FromAddress(start);
    CHECK(obj != filler1 && filler1->IsFiller() &&
          filler1->Size() == kPointerSize);
  }

  // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
  // on platform.
  start = AlignOldSpace(kSimd128Unaligned, 0);
  obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is a filler object after the object.
  filler1 = HeapObject::FromAddress(start + kPointerSize);
  CHECK(obj != filler1 && filler1->IsFiller() &&
        filler1->Size() == kSimd128Size - kPointerSize);
  start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
  obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is a filler object before the object.
  filler1 = HeapObject::FromAddress(start);
  CHECK(obj != filler1 && filler1->IsFiller() &&
        filler1->Size() == kSimd128Size - kPointerSize);

  if (double_misalignment) {
    // Test the 2 other alignments possible on 32 bit platforms.
    start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
    obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There are filler objects before and after the object.
    filler1 = HeapObject::FromAddress(start);
    CHECK(obj != filler1 && filler1->IsFiller() &&
          filler1->Size() == 2 * kPointerSize);
    filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
    CHECK(obj != filler2 && filler2->IsFiller() &&
          filler2->Size() == kPointerSize);
    start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
    obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There are filler objects before and after the object.
    filler1 = HeapObject::FromAddress(start);
    CHECK(obj != filler1 && filler1->IsFiller() &&
          filler1->Size() == kPointerSize);
    filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
    CHECK(obj != filler2 && filler2->IsFiller() &&
          filler2->Size() == 2 * kPointerSize);
  }
}


TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
  CcTest::InitializeVM();
  HeapIterator iterator(CcTest::heap());
  intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
  intptr_t size_of_objects_2 = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (!obj->IsFreeSpace()) {
      size_of_objects_2 += obj->Size();
    }
  }
  // Delta must be within 5% of the larger result.
  // TODO(gc): Tighten this up by distinguishing between byte
  // arrays that are real and those that merely mark free space
  // on the heap.
  if (size_of_objects_1 > size_of_objects_2) {
    intptr_t delta = size_of_objects_1 - size_of_objects_2;
    PrintF("Heap::SizeOfObjects: %" V8PRIdPTR ", "
           "Iterator: %" V8PRIdPTR ", "
           "delta: %" V8PRIdPTR "\n",
           size_of_objects_1, size_of_objects_2, delta);
    CHECK_GT(size_of_objects_1 / 20, delta);
  } else {
    intptr_t delta = size_of_objects_2 - size_of_objects_1;
    PrintF("Heap::SizeOfObjects: %" V8PRIdPTR ", "
           "Iterator: %" V8PRIdPTR ", "
           "delta: %" V8PRIdPTR "\n",
           size_of_objects_1, size_of_objects_2, delta);
    CHECK_GT(size_of_objects_2 / 20, delta);
  }
}


static void FillUpNewSpace(NewSpace* new_space) {
  // Fill up new space to the point that it is completely full. Make sure
  // that the scavenger does not undo the filling.
  Heap* heap = new_space->heap();
  Isolate* isolate = heap->isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  AlwaysAllocateScope always_allocate(isolate);
  intptr_t available = new_space->Capacity() - new_space->Size();
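  // Note: the -1 below presumably leaves a little slack so that the final
  // allocation cannot fail and fall through to old space.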
  intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
  for (intptr_t i = 0; i < number_of_fillers; i++) {
    CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
  }
}


TEST(GrowAndShrinkNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  // Explicitly growing should double the space capacity.
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);

  old_capacity = new_space->TotalCapacity();
  FillUpNewSpace(new_space);
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Explicitly shrinking should not affect the space capacity while the
  // space is full.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Let the scavenger empty the new space.
  heap->CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}


TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);
  FillUpNewSpace(new_space);
  heap->CollectAllAvailableGarbage();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}


static int NumberOfGlobalObjects() {
  int count = 0;
  HeapIterator iterator(CcTest::heap());
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    if (obj->IsJSGlobalObject()) count++;
  }
  return count;
}


// Test that we don't embed maps from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaMap) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::heap()->CollectAllAvailableGarbage();
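  // Each live native context contributes one global object, so both contexts
  // should still be counted here.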
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = {x: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o.x; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}


// Test that we don't embed functions from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaFunction) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}


TEST(LeakNativeContextViaMapKeyed) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}


TEST(LeakNativeContextViaMapProto) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}


TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  if (i::FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests, otherwise we will get a GC when we
    // enter the code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }

  CHECK(marking->IsMarking());

  {
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::heap()->CollectGarbage(OLD_SPACE);
}

namespace {

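// With Ignition, profiler ticks are tracked on the SharedFunctionInfo itself;
// with full-codegen they are tracked on the generated code object.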
int GetProfilerTicks(SharedFunctionInfo* shared) {
  return FLAG_ignition ? shared->profiler_ticks()
                       : shared->code()->profiler_ticks();
}

}  // namespace

TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++) s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
  CHECK(f->IsOptimized());

  // Make sure incremental marking is not running.
  CcTest::heap()->incremental_marking()->Stop();

  CcTest::heap()->StartIncrementalMarking();
  // The following calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, GetProfilerTicks(f->shared()));
}


TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++) s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
  CHECK(f->IsOptimized());

  // Make sure incremental marking is not running.
  CcTest::heap()->incremental_marking()->Stop();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  CcTest::heap()->CollectAllGarbage();

  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, GetProfilerTicks(f->shared()));
}


HEAP_TEST(GCFlags) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Set the flags to check whether we appropriately reset them after the GC.
  heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}


TEST(IdleNotificationFinishMarking) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  const int initial_gc_count = CcTest::heap()->gc_count();
  heap::SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);

  // TODO(hpayer): We cannot write a proper unit test for the heap right now.
  // The ideal test would call kMaxIdleMarkingDelayCounter to test the
  // marking delay counter.

  // Perform a huge incremental marking step but don't complete marking.
  intptr_t bytes_processed = 0;
  do {
    bytes_processed =
        marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                      IncrementalMarking::FORCE_MARKING,
                      IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
  } while (bytes_processed);

  // The next invocations of incremental marking are not going to complete
  // marking since the completion threshold is not reached.
  for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
       i++) {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  IncrementalMarking::FORCE_MARKING,
                  IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
  }

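  // Pretend the weak closure was over-approximated so that the next idle
  // notification is allowed to finalize marking.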
  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
}


// Test that HAllocateObject will always return an object in new-space.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  heap::SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");

  CHECK_EQ(4, res.As<v8::Object>()
                  ->GetRealNamedProperty(ctx, v8_str("x"))
                  .ToLocalChecked()
                  ->Int32Value(ctx)
                  .FromJust());

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InNewSpace(*o));
}


TEST(OptimizedPretenuringAllocationFolding) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

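  // The recipe below (f(), gc(), more calls, then optimization) lets the
  // allocation-site feedback mature, so the optimized code should allocate
  // directly in old space; the other pretenuring tests follow the same
  // pattern.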
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array();"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}], [1.1]];"
      "  }"
      "  return elements[number_elements-1]"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}


TEST(OptimizedPretenuringObjectArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [{}, {}, {}];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}


TEST(OptimizedPretenuringMixedInObjectProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
  if (!o->IsUnboxedDoubleField(idx2)) {
    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
  } else {
    CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
  }

  JSObject* inner_object =
      reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
  CHECK(CcTest::heap()->InOldSpace(inner_object));
  if (!inner_object->IsUnboxedDoubleField(idx1)) {
    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
  } else {
    CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
  }
  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}


TEST(OptimizedPretenuringDoubleArrayProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = {a: 1.1, b: 2.2};"
      "  }"
      "  return elements[i - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(o->properties()));
}


TEST(OptimizedPretenuringDoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [1.1, 2.2, 3.3];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}


TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = 100;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}


TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
  v8::Local<v8::Value> int_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
}


TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity is reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      "  for (var i = 0; i < number_elements; i++) {"
      "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      "  }"
      "  return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
}


// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = new Array(1, 2, 3);"
      "  numbers[0] = 3.14;"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
                                       ->Get(ctx, v8_str("0"))
                                       .ToLocalChecked()
                                       ->Int32Value(ctx)
                                       .FromJust());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

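  // Only a handful of calls are made here, so the allocation site never
  // collects enough samples to switch to tenured allocations; the backing
  // store is expected to stay in new space.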
  CHECK(CcTest::heap()->InNewSpace(o->elements()));
}


static int CountMapTransitions(Map* map) {
  return TransitionArray::NumberOfTransitions(map->raw_transitions());
}


// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  static const int transitions_count = 256;

  CompileRun("function F() {}");
  {
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  i::Handle<JSReceiver> root =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

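  // heap::SimulateIncrementalMarking steps the incremental marker until
  // marking is complete, so the full GC below only has to finalize it; with
  // retain_maps_for_n_gc == 0, unused transition targets die in this cycle.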
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}


#ifdef DEBUG
static void AddTransitions(int transitions_count) {
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  for (int i = 0; i < transitions_count; i++) {
    EmbeddedVector<char, 64> buffer;
    SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
    CompileRun(buffer.start());
  }
}


static i::Handle<JSObject> GetByName(const char* name) {
  return i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()
              ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
}


static void AddPropertyTo(
    int gc_count, Handle<JSObject> object, const char* property_name) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  i::FLAG_gc_interval = gc_count;
  i::FLAG_gc_global = true;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::heap()->set_allocation_timeout(gc_count);
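  // With gc_interval and the allocation timeout armed, one of the
  // allocations performed inside SetProperty below triggers a global GC;
  // that collection is the window in which the old map's transition array
  // is expected to shrink.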
  JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
}


TEST(TransitionArrayShrinksDuringAllocToZero) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() { }");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Get rid of o.
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Count number of live transitions after marking. All previous transition
  // targets are unreachable, so only the freshly added 'funny' transition
  // remains.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}


TEST(TransitionArrayShrinksDuringAllocToOne) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Count number of live transitions after marking. Two transitions are
  // left: 'o' still holds an instance of one transition target, and the
  // 'funny' transition was just added.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(2, transitions_after);
}


TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(0, root, "prop9");
  CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}


TEST(TransitionArraySimpleToFull) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 1;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
  AddPropertyTo(2, root, "happy");

  // Count number of live transitions after marking. The simple transition
  // from above is gone; only the freshly added 'happy' transition remains.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
#endif  // DEBUG


TEST(Regress2143a) {
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  heap::SimulateIncrementalMarking(CcTest::heap());

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}


TEST(Regress2143b) {
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  heap::SimulateIncrementalMarking(CcTest::heap());

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(new Object);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}


TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  // Parallel compaction increases fragmentation, depending on how existing
  // memory is distributed. Since this is non-deterministic because of
  // concurrent sweeping, we disable it for this test.
  i::FLAG_parallel_compaction = false;
  // Concurrent sweeping adds non-determinism, depending on when memory is
  // available for further reuse.
  i::FLAG_concurrent_sweeping = false;
  // Fast evacuation of pages may result in a different page count in old
  // space.
  i::FLAG_page_promotion = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_space = heap->old_space();
  const int initial_page_count = old_space->CountTotalPages();
  const int overall_page_count = number_of_test_pages + initial_page_count;
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    heap::SimulateFullSpace(old_space);
    factory->NewFixedArray(1, TENURED);
  }
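  // Filling the space before each one-element array forces every array onto
  // a fresh page, leaving number_of_test_pages nearly empty pages behind.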
  CHECK_EQ(overall_page_count, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // evenly spread across all allocated pages.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered for preparation");
  CHECK_GE(overall_page_count, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 1");
  CHECK_GE(overall_page_count, old_space->CountTotalPages());
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 2");
  CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  heap->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(initial_page_count, old_space->CountTotalPages());
}

static int forced_gc_counter = 0;

void MockUseCounterCallback(v8::Isolate* isolate,
                            v8::Isolate::UseCounterFeature feature) {
  isolate->GetCurrentContext();
  if (feature == v8::Isolate::kForcedGC) {
    forced_gc_counter++;
  }
}


TEST(CountForcedGC) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  isolate->SetUseCounterCallback(MockUseCounterCallback);

  forced_gc_counter = 0;
  const char* source = "gc();";
  CompileRun(source);
  CHECK_GT(forced_gc_counter, 0);
}


#ifdef OBJECT_PRINT
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << std::endl;
}
#endif  // OBJECT_PRINT


TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  {
    CompileRun("function fun() {};");
    fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  {
    CompileRun("function fun() {};");
    fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains type feedback for the two closures.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> feedback_vector(f->feedback_vector());
  FeedbackVectorHelper feedback_helper(feedback_vector);

  int expected_slots = 2;
  CHECK_EQ(expected_slots, feedback_helper.slot_count());
  int slot1 = 0;
  int slot2 = 1;
  CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
  CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());

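  // The closures are referenced from the vector through WeakCells; marking
  // plus a full GC must still treat them as live, because fun1 and fun2
  // remain strongly reachable from the global object.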
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
             ->cleared());
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
             ->cleared());
}


static Code* FindFirstIC(Code* code, Code::Kind kind) {
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Code* target = Code::GetCodeFromTargetAddress(info->target_address());
    if (target->is_inline_cache_stub() && target->kind() == kind) {
      return target;
    }
  }
  return NULL;
}


static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
                          InlineCacheState desired_state) {
  Handle<TypeFeedbackVector> vector =
      Handle<TypeFeedbackVector>(f->feedback_vector());
  FeedbackVectorHelper helper(vector);
  FeedbackVectorSlot slot = helper.slot(slot_index);
  if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
    LoadICNexus nexus(vector, slot);
    CHECK(nexus.StateFromFeedback() == desired_state);
  } else {
    CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
    KeyedLoadICNexus nexus(vector, slot);
    CHECK(nexus.StateFromFeedback() == desired_state);
  }
}

TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for an object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> vector(f->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}

TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for an object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, MONOMORPHIC);
}

TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // The polymorphic IC must survive incremental marking plus a full GC.
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}

TEST(ContextDisposeDoesntClearPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}


class SourceResource : public v8::String::ExternalOneByteStringResource {
 public:
  explicit SourceResource(const char* data)
      : data_(data), length_(strlen(data)) {}

  virtual void Dispose() {
    i::DeleteArray(data_);
    data_ = NULL;
  }

  const char* data() const { return data_; }

  size_t length() const { return length_; }

  bool IsDisposed() { return data_ == NULL; }

 private:
  const char* data_;
  size_t length_;
};


void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use an external string
  // to check whether the data is being released, since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage();
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

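  // Firing the accessor materializes the stack trace; after that the
  // retained source information, and with it the external string resource,
  // can be dropped by the next GC.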
  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}


UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (i::FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 =
        "var error = null; "
        /* Normal Error */
        "try { "
        "  throw new Error(); "
        "} catch (e) { "
        "  error = e; "
        "} ";
    static const char* source2 =
        "var error = null; "
        /* Stack overflow */
        "try { "
        "  (function f() { f(); })(); "
        "} catch (e) { "
        "  error = e; "
        "} ";
    static const char* source3 =
        "var error = null; "
        /* Normal Error as prototype */
        "try { "
        "  throw new Error(); "
        "} catch (e) { "
        "  error = {}; "
        "  error.__proto__ = e; "
        "} ";
    static const char* source4 =
        "var error = null; "
        /* Stack overflow as prototype */
        "try { "
        "  (function f() { f(); })(); "
        "} catch (e) { "
        "  error = {}; "
        "  error.__proto__ = e; "
        "} ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}


TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    CHECK(g->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }
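    // MakeOlder fakes the aging that normally happens over several GC
    // cycles; only code that is sufficiently old is considered for code
    // flushing.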

    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  heap::SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage();

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}


TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare an optimized closure so that the optimized code map gets
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  heap::SimulateIncrementalMarking(heap);
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}


TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(env.local(), v8_str("flushMe"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  heap::SimulateIncrementalMarking(heap);
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage();
  CHECK(shared1->code()->gc_metadata() == NULL);
}


TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment.
  CompileRun("function fastliteralcase(literal, value) {"
             "  literal[0] = value;"
             "  return literal;"
             "}"
             "function get_standard_literal() {"
             "  var literal = [1, 2, 3];"
             "  return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // Prepare the heap.
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces.
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  heap::AllocateAllButNBytes(
      CcTest::heap()->new_space(),
      JSArray::kSize + AllocationMemento::kSize + kPointerSize);
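
  // Rough new-space layout that the next allocations set up (top grows to
  // the right):
  //   [ ...fill... | JSArray | filler: memento size + one word | top ]
  // Reading a stale AllocationMemento from right behind the array is what
  // used to crash here.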
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004191
4192 Handle<JSArray> array =
4193 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4194
4195 CHECK_EQ(Smi::FromInt(2), array->length());
4196 CHECK(array->HasFastSmiOrObjectElements());
4197
4198 // We need filler the size of AllocationMemento object, plus an extra
4199 // fill pointer value.
4200 HeapObject* obj = NULL;
4201 AllocationResult allocation =
4202 CcTest::heap()->new_space()->AllocateRawUnaligned(
4203 AllocationMemento::kSize + kPointerSize);
4204 CHECK(allocation.To(&obj));
4205 Address addr_obj = obj->address();
Ben Murdochda12d292016-06-02 14:46:10 +01004206 CcTest::heap()->CreateFillerObjectAt(addr_obj,
4207 AllocationMemento::kSize + kPointerSize,
4208 ClearRecordedSlots::kNo);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004209
4210 // Give the array a name, making sure not to allocate strings.
4211 v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
4212 CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());
4213
4214 // This should crash with a protection violation if we are running a build
4215 // with the bug.
4216 AlwaysAllocateScope aa_scope(isolate);
4217 v8::Script::Compile(env.local(), mote_code_string)
4218 .ToLocalChecked()
4219 ->Run(env.local())
4220 .ToLocalChecked();
4221}
4222
4223
#ifdef DEBUG
TEST(Regress513507) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_gc_global = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Prepare a function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector =
      TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the new code several times to the optimized code map and also set an
  // allocation timeout so that expanding the code map will trigger a GC.
  heap->set_allocation_timeout(5);
  FLAG_gc_interval = 1000;
  for (int i = 0; i < 10; ++i) {
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
}
#endif  // DEBUG


TEST(Regress514122) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare a function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector =
      TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the code several times to the optimized code map.
  for (int i = 0; i < 3; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  shared->optimized_code_map()->Print();

  // Add the code with a literals array to be evacuated.
  Page* evac_page;
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure the literal is placed on an old-space evacuation candidate.
    heap::SimulateFullSpace(heap->old_space());

    // Make sure the number of literals is > 0.
    Handle<LiteralsArray> lit =
        LiteralsArray::New(isolate, vector, 23, TENURED);

    evac_page = Page::FromAddress(lit->address());
    BailoutId id = BailoutId(100);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Heap is ready, force {evac_page} to become an evacuation candidate and
  // simulate incremental marking to enqueue the optimized code map.
  FLAG_manual_evacuation_candidates_selection = true;
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap::SimulateIncrementalMarking(heap);

  // No matter whether reachable or not, {boomer} is doomed.
  Handle<Object> boomer(shared->optimized_code_map(), isolate);

  // Add the code several times to the optimized code map. This will leave old
  // copies of the optimized code map unreachable but still marked.
  for (int i = 3; i < 6; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Trigger a GC to flush out the bug.
  heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
  boomer->Print();
}


4375TEST(OptimizedCodeMapReuseEntries) {
4376 i::FLAG_flush_optimized_code_cache = false;
4377 i::FLAG_allow_natives_syntax = true;
4378 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
4379 // run this test.
4380 if (i::FLAG_turbo) return;
4381 CcTest::InitializeVM();
4382 v8::Isolate* v8_isolate = CcTest::isolate();
4383 Isolate* isolate = CcTest::i_isolate();
4384 Heap* heap = isolate->heap();
4385 HandleScope scope(isolate);
4386
4387 // Create 3 contexts, allow the 2nd one to be disposed, and verify that
4388 // a 4th context will re-use the weak slots in the optimized code map
4389 // to hold data, rather than expanding the map.
4390 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
4391 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
4392 v8::ScriptCompiler::Source script_source(
4393 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
4394 .ToLocalChecked());
4395 v8::Local<v8::UnboundScript> indep =
4396 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
4397 .ToLocalChecked();
4398 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
4399 // Perfrom one initial GC to enable code flushing.
4400 heap->CollectAllGarbage();
4401
4402 c1->Enter();
4403 indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
4404 CompileRun(toplevel);
4405
4406 Handle<SharedFunctionInfo> shared;
4407 Handle<JSFunction> foo = Handle<JSFunction>::cast(
4408 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4409 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
4410 CHECK(foo->shared()->is_compiled());
4411 shared = handle(foo->shared());
4412 c1->Exit();
4413
4414 {
4415 HandleScope scope(isolate);
4416 v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
4417 c2->Enter();
4418 indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
4419 CompileRun(toplevel);
4420 c2->Exit();
4421 }
4422
4423 {
4424 HandleScope scope(isolate);
4425 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
4426 c3->Enter();
4427 indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
4428 CompileRun(toplevel);
4429 c3->Exit();
4430
4431 // Now, collect garbage. Context c2 should have no roots to it, and it's
4432 // entry in the optimized code map should be free for a new context.
4433 for (int i = 0; i < 4; i++) {
4434 heap->CollectAllGarbage();
4435 }
4436
4437 Handle<FixedArray> optimized_code_map =
4438 handle(shared->optimized_code_map());
4439 // There should be 3 entries in the map.
4440 CHECK_EQ(
4441 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
4442 SharedFunctionInfo::kEntryLength));
4443 // But one of them (formerly for c2) should be cleared.
4444 int cleared_count = 0;
4445 for (int i = SharedFunctionInfo::kEntriesStart;
4446 i < optimized_code_map->length();
4447 i += SharedFunctionInfo::kEntryLength) {
4448 cleared_count +=
4449 WeakCell::cast(
4450 optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
4451 ->cleared()
4452 ? 1
4453 : 0;
4454 }
4455 CHECK_EQ(1, cleared_count);
4456
4457 // Verify that a new context uses the cleared entry rather than creating a
4458 // new
4459 // optimized code map array.
4460 v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
4461 c4->Enter();
4462 indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
4463 CompileRun(toplevel);
4464 c4->Exit();
4465 CHECK_EQ(*optimized_code_map, shared->optimized_code_map());
4466
4467 // Now each entry is in use.
4468 cleared_count = 0;
4469 for (int i = SharedFunctionInfo::kEntriesStart;
4470 i < optimized_code_map->length();
4471 i += SharedFunctionInfo::kEntryLength) {
4472 cleared_count +=
4473 WeakCell::cast(
4474 optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
4475 ->cleared()
4476 ? 1
4477 : 0;
4478 }
4479 CHECK_EQ(0, cleared_count);
4480 }
4481}
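

// A minimal sketch, not invoked by the tests, of the two computations that the
// test above performs inline. It assumes the optimized code map layout implied
// by SharedFunctionInfo::kEntriesStart, kEntryLength and kContextOffset.
static int OptimizedCodeMapEntryCount(FixedArray* optimized_code_map) {
  // Entries start at kEntriesStart and each entry spans kEntryLength slots.
  return (optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
         SharedFunctionInfo::kEntryLength;
}

static int ClearedContextCellCount(FixedArray* optimized_code_map) {
  // An entry whose context WeakCell is cleared can be reused by a new context.
  int cleared = 0;
  for (int i = SharedFunctionInfo::kEntriesStart;
       i < optimized_code_map->length();
       i += SharedFunctionInfo::kEntryLength) {
    if (WeakCell::cast(
            optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
            ->cleared()) {
      cleared++;
    }
  }
  return cleared;
}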
4482
4483
4484TEST(Regress513496) {
4485 i::FLAG_flush_optimized_code_cache = false;
4486 i::FLAG_allow_natives_syntax = true;
4487 CcTest::InitializeVM();
4488 Isolate* isolate = CcTest::i_isolate();
4489 Heap* heap = isolate->heap();
4490 HandleScope scope(isolate);
4491
4492 // Perform one initial GC to enable code flushing.
4493 CcTest::heap()->CollectAllGarbage();
4494
4495 // Prepare an optimized closure containing an inlined function. Then age
4496 // the inlined unoptimized code to trigger code flushing but make sure the
4497 // outer optimized code is kept in the optimized code map.
4498 Handle<SharedFunctionInfo> shared;
4499 {
4500 LocalContext context;
4501 HandleScope inner_scope(isolate);
4502 CompileRun(
4503 "function g(x) { return x + 1 }"
4504 "function mkClosure() {"
4505 " return function(x) { return g(x); };"
4506 "}"
4507 "var f = mkClosure();"
4508 "f(1); f(2);"
4509 "%OptimizeFunctionOnNextCall(f); f(3);");
4510
4511 Handle<JSFunction> g = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4512 *v8::Local<v8::Function>::Cast(CcTest::global()
4513 ->Get(context.local(), v8_str("g"))
4514 .ToLocalChecked())));
4515 CHECK(g->shared()->is_compiled());
4516 const int kAgingThreshold = 6;
4517 for (int i = 0; i < kAgingThreshold; i++) {
4518 g->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4519 }
4520
4521 Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4522 *v8::Local<v8::Function>::Cast(CcTest::global()
4523 ->Get(context.local(), v8_str("f"))
4524 .ToLocalChecked())));
4525 CHECK(f->is_compiled());
4526 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4527 CompileRun("f = null");
4528 }
4529
4530 // Lookup the optimized code and keep it alive.
4531 CodeAndLiterals result = shared->SearchOptimizedCodeMap(
4532 isolate->context()->native_context(), BailoutId::None());
4533 Handle<Code> optimized_code(result.code, isolate);
4534
4535 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
4536 // though the optimized code for 'f' is reachable via the optimized code map.
4537 heap->CollectAllGarbage();
4538
4539 // Make a new closure that will get code installed from the code map.
4540 // Unoptimized code is missing and the deoptimizer will go ballistic.
4541 CompileRun("var h = mkClosure(); h('bozo');");
4542}
4543
4544
4545TEST(LargeObjectSlotRecording) {
4546 FLAG_manual_evacuation_candidates_selection = true;
4547 CcTest::InitializeVM();
4548 Isolate* isolate = CcTest::i_isolate();
4549 Heap* heap = isolate->heap();
4550 HandleScope scope(isolate);
4551
4552 // Create an object on an evacuation candidate.
Ben Murdoch61f157c2016-09-16 13:49:30 +01004553 heap::SimulateFullSpace(heap->old_space());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004554 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4555 Page* evac_page = Page::FromAddress(lit->address());
4556 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4557 FixedArray* old_location = *lit;
4558
4559 // Allocate a large object.
4560 int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
4561 CHECK(size > Page::kMaxRegularHeapObjectSize);
4562 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
4563 CHECK(heap->lo_space()->Contains(*lo));
4564
4565 // Start incremental marking to activate the write barrier.
Ben Murdoch61f157c2016-09-16 13:49:30 +01004566 heap::SimulateIncrementalMarking(heap, false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004567 heap->incremental_marking()->AdvanceIncrementalMarking(
Ben Murdochda12d292016-06-02 14:46:10 +01004568 10000000, IncrementalMarking::IdleStepActions());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004569
4570 // Create references from the large object to the object on the evacuation
4571 // candidate.
4572 const int kStep = size / 10;
4573 for (int i = 0; i < size; i += kStep) {
4574 lo->set(i, *lit);
4575 CHECK(lo->get(i) == old_location);
4576 }
4577
4578 // Move the evacuation candidate object.
4579 CcTest::heap()->CollectAllGarbage();
4580
4581 // Verify that the pointers in the large object got updated.
4582 for (int i = 0; i < size; i += kStep) {
4583 CHECK_EQ(lo->get(i), *lit);
4584 CHECK(lo->get(i) != old_location);
4585 }
4586}
4587
4588
4589class DummyVisitor : public ObjectVisitor {
4590 public:
4591 void VisitPointers(Object** start, Object** end) override {}
4592};
4593
4594
4595TEST(DeferredHandles) {
4596 CcTest::InitializeVM();
4597 Isolate* isolate = CcTest::i_isolate();
4598 Heap* heap = isolate->heap();
4599 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4600 HandleScopeData* data = isolate->handle_scope_data();
4601 Handle<Object> init(heap->empty_string(), isolate);
4602 while (data->next < data->limit) {
4603 Handle<Object> obj(heap->empty_string(), isolate);
4604 }
4605 // An entire block of handles has been filled.
4606 // The next handle would require a new block.
4607 CHECK(data->next == data->limit);
4608
4609 DeferredHandleScope deferred(isolate);
4610 DummyVisitor visitor;
4611 isolate->handle_scope_implementer()->Iterate(&visitor);
4612 delete deferred.Detach();
4613}
4614
4615
4616TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4617 CcTest::InitializeVM();
4618 v8::HandleScope scope(CcTest::isolate());
4619 CompileRun("function f(n) {"
4620 " var a = new Array(n);"
4621 " for (var i = 0; i < n; i += 100) a[i] = i;"
4622 "};"
4623 "f(10 * 1024 * 1024);");
4624 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4625 if (marking->IsStopped()) {
4626 CcTest::heap()->StartIncrementalMarking();
4627 }
4628 // This big step should be sufficient to mark the whole array.
4629 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4630 CHECK(marking->IsComplete() ||
4631 marking->IsReadyToOverApproximateWeakClosure());
4632}
4633
4634
4635TEST(DisableInlineAllocation) {
4636 i::FLAG_allow_natives_syntax = true;
4637 CcTest::InitializeVM();
4638 v8::HandleScope scope(CcTest::isolate());
4639 CompileRun("function test() {"
4640 " var x = [];"
4641 " for (var i = 0; i < 10; i++) {"
4642 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4643 " }"
4644 "}"
4645 "function run() {"
4646 " %OptimizeFunctionOnNextCall(test);"
4647 " test();"
4648 " %DeoptimizeFunction(test);"
4649 "}");
4650
4651 // Warm up with inline allocation enabled.
4652 CompileRun("test(); test(); run();");
4653
4654 // Run test with inline allocation disabled.
4655 CcTest::heap()->DisableInlineAllocation();
4656 CompileRun("run()");
4657
4658 // Run test with inline allocation re-enabled.
4659 CcTest::heap()->EnableInlineAllocation();
4660 CompileRun("run()");
4661}
4662
4663
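// Walks the heap's weak allocation_sites_list and returns the number of sites.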
4664static int AllocationSitesCount(Heap* heap) {
4665 int count = 0;
4666 for (Object* site = heap->allocation_sites_list();
Ben Murdoch61f157c2016-09-16 13:49:30 +01004667 !(site->IsUndefined(heap->isolate()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004668 site = AllocationSite::cast(site)->weak_next()) {
4669 count++;
4670 }
4671 return count;
4672}
4673
4674
4675TEST(EnsureAllocationSiteDependentCodesProcessed) {
4676 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4677 i::FLAG_allow_natives_syntax = true;
4678 CcTest::InitializeVM();
4679 Isolate* isolate = CcTest::i_isolate();
4680 v8::internal::Heap* heap = CcTest::heap();
4681 GlobalHandles* global_handles = isolate->global_handles();
4682
4683 if (!isolate->use_crankshaft()) return;
4684
4685 // The allocation site at the head of the list is ours.
4686 Handle<AllocationSite> site;
4687 {
4688 LocalContext context;
4689 v8::HandleScope scope(context->GetIsolate());
4690
4691 int count = AllocationSitesCount(heap);
4692 CompileRun("var bar = function() { return (new Array()); };"
4693 "var a = bar();"
4694 "bar();"
4695 "bar();");
4696
4697 // One allocation site should have been created.
4698 int new_count = AllocationSitesCount(heap);
4699 CHECK_EQ(new_count, (count + 1));
4700 site = Handle<AllocationSite>::cast(
4701 global_handles->Create(
4702 AllocationSite::cast(heap->allocation_sites_list())));
4703
4704 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4705
4706 CHECK_EQ(DependentCode::kAllocationSiteTransitionChangedGroup,
4707 site->dependent_code()->group());
4708 CHECK_EQ(1, site->dependent_code()->count());
4709 CHECK(site->dependent_code()->object_at(0)->IsWeakCell());
4710 Code* function_bar = Code::cast(
4711 WeakCell::cast(site->dependent_code()->object_at(0))->value());
4712 Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
4713 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4714 CcTest::global()
4715 ->Get(context.local(), v8_str("bar"))
4716 .ToLocalChecked())));
4717 CHECK_EQ(bar_handle->code(), function_bar);
4718 }
4719
4720 // Now make sure that a GC gets rid of the function, even though we
4721 // still keep the allocation site alive.
4722 for (int i = 0; i < 4; i++) {
4723 heap->CollectAllGarbage();
4724 }
4725
4726 // The site still exists because of our global handle, but the code is no
4727 // longer referred to by dependent_code().
4728 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
4729 WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
4730}
4731
4732
4733TEST(CellsInOptimizedCodeAreWeak) {
4734 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4735 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4736 i::FLAG_allow_natives_syntax = true;
4737 CcTest::InitializeVM();
4738 Isolate* isolate = CcTest::i_isolate();
4739 v8::internal::Heap* heap = CcTest::heap();
4740
4741 if (!isolate->use_crankshaft()) return;
4742 HandleScope outer_scope(heap->isolate());
4743 Handle<Code> code;
4744 {
4745 LocalContext context;
4746 HandleScope scope(heap->isolate());
4747
4748 CompileRun(
4749 "bar = (function() {"
4750 " function bar() {"
4751 " return foo(1);"
4752 " };"
4753 " var foo = function(x) { with (x) { return 1 + x; } };"
4754 " %NeverOptimizeFunction(foo);"
4755 " bar(foo);"
4756 " bar(foo);"
4757 " bar(foo);"
4758 " %OptimizeFunctionOnNextCall(bar);"
4759 " bar(foo);"
4760 " return bar;})();");
4761
4762 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4763 *v8::Local<v8::Function>::Cast(CcTest::global()
4764 ->Get(context.local(), v8_str("bar"))
4765 .ToLocalChecked())));
4766 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4767 }
4768
4769 // Now make sure that a GC gets rid of the function.
4770 for (int i = 0; i < 4; i++) {
4771 heap->CollectAllGarbage();
4772 }
4773
4774 CHECK(code->marked_for_deoptimization());
4775}
4776
4777
4778TEST(ObjectsInOptimizedCodeAreWeak) {
4779 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4780 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4781 i::FLAG_allow_natives_syntax = true;
4782 CcTest::InitializeVM();
4783 Isolate* isolate = CcTest::i_isolate();
4784 v8::internal::Heap* heap = CcTest::heap();
4785
4786 if (!isolate->use_crankshaft()) return;
4787 HandleScope outer_scope(heap->isolate());
4788 Handle<Code> code;
4789 {
4790 LocalContext context;
4791 HandleScope scope(heap->isolate());
4792
4793 CompileRun(
4794 "function bar() {"
4795 " return foo(1);"
4796 "};"
4797 "function foo(x) { with (x) { return 1 + x; } };"
4798 "%NeverOptimizeFunction(foo);"
4799 "bar();"
4800 "bar();"
4801 "bar();"
4802 "%OptimizeFunctionOnNextCall(bar);"
4803 "bar();");
4804
4805 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4806 *v8::Local<v8::Function>::Cast(CcTest::global()
4807 ->Get(context.local(), v8_str("bar"))
4808 .ToLocalChecked())));
4809 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4810 }
4811
4812 // Now make sure that a GC gets rid of the function.
4813 for (int i = 0; i < 4; i++) {
4814 heap->CollectAllGarbage();
4815 }
4816
4817 CHECK(code->marked_for_deoptimization());
4818}
4819
4820
4821TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4822 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4823 if (!i::FLAG_incremental_marking) return;
4824 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4825 i::FLAG_allow_natives_syntax = true;
4826 i::FLAG_compilation_cache = false;
4827 i::FLAG_retain_maps_for_n_gc = 0;
4828 CcTest::InitializeVM();
4829 Isolate* isolate = CcTest::i_isolate();
4830
4831 // Do not run for no-snap builds.
4832 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
4833
4834 v8::internal::Heap* heap = CcTest::heap();
4835
4836 // Get a clean slate regarding optimized functions on the heap.
4837 i::Deoptimizer::DeoptimizeAll(isolate);
4838 heap->CollectAllGarbage();
4839
4840 if (!isolate->use_crankshaft()) return;
4841 HandleScope outer_scope(heap->isolate());
4842 for (int i = 0; i < 3; i++) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01004843 heap::SimulateIncrementalMarking(heap);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004844 {
4845 LocalContext context;
4846 HandleScope scope(heap->isolate());
4847 EmbeddedVector<char, 256> source;
4848 SNPrintF(source,
4849 "function bar%d() {"
4850 " return foo%d(1);"
4851 "};"
4852 "function foo%d(x) { with (x) { return 1 + x; } };"
4853 "bar%d();"
4854 "bar%d();"
4855 "bar%d();"
4856 "%%OptimizeFunctionOnNextCall(bar%d);"
4857 "bar%d();",
4858 i, i, i, i, i, i, i, i);
4859 CompileRun(source.start());
4860 }
Ben Murdochda12d292016-06-02 14:46:10 +01004861 // We have to abort incremental marking here to abandon black pages.
4862 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004863 }
4864 int elements = 0;
4865 if (heap->weak_object_to_code_table()->IsHashTable()) {
4866 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4867 elements = t->NumberOfElements();
4868 }
4869 CHECK_EQ(0, elements);
4870}
4871
4872
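// Compiles and runs a trivial function with the given name, forces it to be
// optimized via natives syntax, and returns a handle to the JSFunction.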
4873static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
4874 const char* name) {
4875 EmbeddedVector<char, 256> source;
4876 SNPrintF(source,
4877 "function %s() { return 0; }"
4878 "%s(); %s();"
4879 "%%OptimizeFunctionOnNextCall(%s);"
4880 "%s();", name, name, name, name, name);
4881 CompileRun(source.start());
4882 i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
4883 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4884 CcTest::global()
4885 ->Get(isolate->GetCurrentContext(), v8_str(name))
4886 .ToLocalChecked())));
4887 return fun;
4888}
4889
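// Counts how many Code objects follow |code| on its next_code_link chain.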
4890
4891static int GetCodeChainLength(Code* code) {
4892 int result = 0;
4893 while (code->next_code_link()->IsCode()) {
4894 result++;
4895 code = Code::cast(code->next_code_link());
4896 }
4897 return result;
4898}
4899
4900
4901TEST(NextCodeLinkIsWeak) {
4902 i::FLAG_always_opt = false;
4903 i::FLAG_allow_natives_syntax = true;
4904 CcTest::InitializeVM();
4905 Isolate* isolate = CcTest::i_isolate();
4906 v8::internal::Heap* heap = CcTest::heap();
4907
4908 if (!isolate->use_crankshaft()) return;
4909 HandleScope outer_scope(heap->isolate());
4910 Handle<Code> code;
4911 heap->CollectAllAvailableGarbage();
4912 int code_chain_length_before, code_chain_length_after;
4913 {
4914 HandleScope scope(heap->isolate());
4915 Handle<JSFunction> mortal =
4916 OptimizeDummyFunction(CcTest::isolate(), "mortal");
4917 Handle<JSFunction> immortal =
4918 OptimizeDummyFunction(CcTest::isolate(), "immortal");
4919 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4920 code_chain_length_before = GetCodeChainLength(immortal->code());
4921 // Keep the immortal code and let the mortal code die.
4922 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4923 CompileRun("mortal = null; immortal = null;");
4924 }
4925 heap->CollectAllAvailableGarbage();
4926 // Now mortal code should be dead.
4927 code_chain_length_after = GetCodeChainLength(*code);
4928 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
4929}
4930
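// Assembles a minimal code object flagged as OPTIMIZED_FUNCTION, suitable for
// threading onto a context's optimized code list.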
4931
4932static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4933 i::byte buffer[i::Assembler::kMinimalBufferSize];
4934 MacroAssembler masm(isolate, buffer, sizeof(buffer),
4935 v8::internal::CodeObjectRequired::kYes);
4936 CodeDesc desc;
4937 masm.Push(isolate->factory()->undefined_value());
4938 masm.Drop(1);
4939 masm.GetCode(&desc);
4940 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4941 Handle<Code> code = isolate->factory()->NewCode(
4942 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4943 CHECK(code->IsCode());
4944 return code;
4945}
4946
4947
4948TEST(NextCodeLinkIsWeak2) {
4949 i::FLAG_allow_natives_syntax = true;
4950 CcTest::InitializeVM();
4951 Isolate* isolate = CcTest::i_isolate();
4952 v8::internal::Heap* heap = CcTest::heap();
4953
4954 if (!isolate->use_crankshaft()) return;
4955 HandleScope outer_scope(heap->isolate());
4956 heap->CollectAllAvailableGarbage();
4957 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4958 Handle<Code> new_head;
4959 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4960 {
4961 HandleScope scope(heap->isolate());
4962 Handle<Code> immortal = DummyOptimizedCode(isolate);
4963 Handle<Code> mortal = DummyOptimizedCode(isolate);
4964 mortal->set_next_code_link(*old_head);
4965 immortal->set_next_code_link(*mortal);
4966 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
4967 new_head = scope.CloseAndEscape(immortal);
4968 }
4969 heap->CollectAllAvailableGarbage();
4970 // Now mortal code should be dead.
4971 CHECK_EQ(*old_head, new_head->next_code_link());
4972}
4973
4974
4975static bool weak_ic_cleared = false;
4976
4977static void ClearWeakIC(
4978 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
4979 printf("ClearWeakIC callback is called\n");
4980 weak_ic_cleared = true;
4981 data.GetParameter()->Reset();
4982}
4983
4984
4985TEST(WeakFunctionInConstructor) {
4986 if (i::FLAG_always_opt) return;
4987 i::FLAG_stress_compaction = false;
4988 CcTest::InitializeVM();
4989 v8::Isolate* isolate = CcTest::isolate();
4990 LocalContext env;
4991 v8::HandleScope scope(isolate);
4992 CompileRun(
4993 "function createObj(obj) {"
4994 " return new obj();"
4995 "}");
4996 i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
4997 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4998 CcTest::global()
4999 ->Get(env.local(), v8_str("createObj"))
5000 .ToLocalChecked())));
5001
5002 v8::Persistent<v8::Object> garbage;
5003 {
5004 v8::HandleScope scope(isolate);
5005 const char* source =
5006 " (function() {"
5007 " function hat() { this.x = 5; }"
5008 " createObj(hat);"
5009 " createObj(hat);"
5010 " return hat;"
5011 " })();";
5012 garbage.Reset(isolate, CompileRun(env.local(), source)
5013 .ToLocalChecked()
5014 ->ToObject(env.local())
5015 .ToLocalChecked());
5016 }
5017 weak_ic_cleared = false;
5018 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5019 Heap* heap = CcTest::i_isolate()->heap();
5020 heap->CollectAllGarbage();
5021 CHECK(weak_ic_cleared);
5022
5023 // We've determined that the constructor in createObj has had its weak cell
5024 // cleared. Now, verify that one additional call with a new function
5025 // makes the IC monomorphic again.
5026 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
Ben Murdoch61f157c2016-09-16 13:49:30 +01005027 createObj->feedback_vector(), CcTest::i_isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005028 for (int i = 0; i < 20; i++) {
5029 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5030 CHECK(slot_value->IsWeakCell());
5031 if (WeakCell::cast(slot_value)->cleared()) break;
5032 heap->CollectAllGarbage();
5033 }
5034
5035 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5036 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5037 CompileRun(
5038 "function coat() { this.x = 6; }"
5039 "createObj(coat);");
5040 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5041 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5042}
5043
5044
5045// Checks that the value returned by execution of the source is weak.
5046void CheckWeakness(const char* source) {
5047 i::FLAG_stress_compaction = false;
5048 CcTest::InitializeVM();
5049 v8::Isolate* isolate = CcTest::isolate();
5050 LocalContext env;
5051 v8::HandleScope scope(isolate);
5052 v8::Persistent<v8::Object> garbage;
5053 {
5054 v8::HandleScope scope(isolate);
5055 garbage.Reset(isolate, CompileRun(env.local(), source)
5056 .ToLocalChecked()
5057 ->ToObject(env.local())
5058 .ToLocalChecked());
5059 }
5060 weak_ic_cleared = false;
5061 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5062 Heap* heap = CcTest::i_isolate()->heap();
5063 heap->CollectAllGarbage();
5064 CHECK(weak_ic_cleared);
5065}
5066
5067
5068// Each of the following "weak IC" tests creates an IC that embeds a map with
5069// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5070TEST(WeakMapInMonomorphicLoadIC) {
5071 CheckWeakness("function loadIC(obj) {"
5072 " return obj.name;"
5073 "}"
5074 " (function() {"
5075 " var proto = {'name' : 'weak'};"
5076 " var obj = Object.create(proto);"
5077 " loadIC(obj);"
5078 " loadIC(obj);"
5079 " loadIC(obj);"
5080 " return proto;"
5081 " })();");
5082}
5083
5084
5085TEST(WeakMapInPolymorphicLoadIC) {
5086 CheckWeakness(
5087 "function loadIC(obj) {"
5088 " return obj.name;"
5089 "}"
5090 " (function() {"
5091 " var proto = {'name' : 'weak'};"
5092 " var obj = Object.create(proto);"
5093 " loadIC(obj);"
5094 " loadIC(obj);"
5095 " loadIC(obj);"
5096 " var poly = Object.create(proto);"
5097 " poly.x = true;"
5098 " loadIC(poly);"
5099 " return proto;"
5100 " })();");
5101}
5102
5103
5104TEST(WeakMapInMonomorphicKeyedLoadIC) {
5105 CheckWeakness("function keyedLoadIC(obj, field) {"
5106 " return obj[field];"
5107 "}"
5108 " (function() {"
5109 " var proto = {'name' : 'weak'};"
5110 " var obj = Object.create(proto);"
5111 " keyedLoadIC(obj, 'name');"
5112 " keyedLoadIC(obj, 'name');"
5113 " keyedLoadIC(obj, 'name');"
5114 " return proto;"
5115 " })();");
5116}
5117
5118
5119TEST(WeakMapInPolymorphicKeyedLoadIC) {
5120 CheckWeakness(
5121 "function keyedLoadIC(obj, field) {"
5122 " return obj[field];"
5123 "}"
5124 " (function() {"
5125 " var proto = {'name' : 'weak'};"
5126 " var obj = Object.create(proto);"
5127 " keyedLoadIC(obj, 'name');"
5128 " keyedLoadIC(obj, 'name');"
5129 " keyedLoadIC(obj, 'name');"
5130 " var poly = Object.create(proto);"
5131 " poly.x = true;"
5132 " keyedLoadIC(poly, 'name');"
5133 " return proto;"
5134 " })();");
5135}
5136
5137
5138TEST(WeakMapInMonomorphicStoreIC) {
5139 CheckWeakness("function storeIC(obj, value) {"
5140 " obj.name = value;"
5141 "}"
5142 " (function() {"
5143 " var proto = {'name' : 'weak'};"
5144 " var obj = Object.create(proto);"
5145 " storeIC(obj, 'x');"
5146 " storeIC(obj, 'x');"
5147 " storeIC(obj, 'x');"
5148 " return proto;"
5149 " })();");
5150}
5151
5152
5153TEST(WeakMapInPolymorphicStoreIC) {
5154 CheckWeakness(
5155 "function storeIC(obj, value) {"
5156 " obj.name = value;"
5157 "}"
5158 " (function() {"
5159 " var proto = {'name' : 'weak'};"
5160 " var obj = Object.create(proto);"
5161 " storeIC(obj, 'x');"
5162 " storeIC(obj, 'x');"
5163 " storeIC(obj, 'x');"
5164 " var poly = Object.create(proto);"
5165 " poly.x = true;"
5166 " storeIC(poly, 'x');"
5167 " return proto;"
5168 " })();");
5169}
5170
5171
5172TEST(WeakMapInMonomorphicKeyedStoreIC) {
5173 CheckWeakness("function keyedStoreIC(obj, field, value) {"
5174 " obj[field] = value;"
5175 "}"
5176 " (function() {"
5177 " var proto = {'name' : 'weak'};"
5178 " var obj = Object.create(proto);"
5179 " keyedStoreIC(obj, 'x');"
5180 " keyedStoreIC(obj, 'x');"
5181 " keyedStoreIC(obj, 'x');"
5182 " return proto;"
5183 " })();");
5184}
5185
5186
5187TEST(WeakMapInPolymorphicKeyedStoreIC) {
5188 CheckWeakness(
5189 "function keyedStoreIC(obj, field, value) {"
5190 " obj[field] = value;"
5191 "}"
5192 " (function() {"
5193 " var proto = {'name' : 'weak'};"
5194 " var obj = Object.create(proto);"
5195 " keyedStoreIC(obj, 'x');"
5196 " keyedStoreIC(obj, 'x');"
5197 " keyedStoreIC(obj, 'x');"
5198 " var poly = Object.create(proto);"
5199 " poly.x = true;"
5200 " keyedStoreIC(poly, 'x');"
5201 " return proto;"
5202 " })();");
5203}
5204
5205
5206TEST(WeakMapInMonomorphicCompareNilIC) {
5207 CheckWeakness("function compareNilIC(obj) {"
5208 " return obj == null;"
5209 "}"
5210 " (function() {"
5211 " var proto = {'name' : 'weak'};"
5212 " var obj = Object.create(proto);"
5213 " compareNilIC(obj);"
5214 " compareNilIC(obj);"
5215 " compareNilIC(obj);"
5216 " return proto;"
5217 " })();");
5218}
5219
5220
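// Looks up a function by name on the isolate's global object.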
5221Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5222 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5223 Handle<Object> obj =
5224 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5225 return Handle<JSFunction>::cast(obj);
5226}
5227
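// Asserts that the IC of the given kind (and, for vector-based ICs, feedback
// slot) in |function| is in the expected inline cache state.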
Ben Murdoch61f157c2016-09-16 13:49:30 +01005228void CheckIC(Handle<JSFunction> function, Code::Kind kind, int slot_index,
5229 InlineCacheState state) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005230 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5231 kind == Code::CALL_IC) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01005232 TypeFeedbackVector* vector = function->feedback_vector();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005233 FeedbackVectorSlot slot(slot_index);
5234 if (kind == Code::LOAD_IC) {
5235 LoadICNexus nexus(vector, slot);
5236 CHECK_EQ(nexus.StateFromFeedback(), state);
5237 } else if (kind == Code::KEYED_LOAD_IC) {
5238 KeyedLoadICNexus nexus(vector, slot);
5239 CHECK_EQ(nexus.StateFromFeedback(), state);
5240 } else if (kind == Code::CALL_IC) {
5241 CallICNexus nexus(vector, slot);
5242 CHECK_EQ(nexus.StateFromFeedback(), state);
5243 }
5244 } else {
Ben Murdoch61f157c2016-09-16 13:49:30 +01005245 Code* ic = FindFirstIC(function->code(), kind);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005246 CHECK(ic->is_inline_cache_stub());
Ben Murdoch61f157c2016-09-16 13:49:30 +01005247 CHECK(!IC::ICUseVector(kind));
5248 CHECK_EQ(state, IC::StateFromCode(ic));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005249 }
5250}
5251
5252
5253TEST(MonomorphicStaysMonomorphicAfterGC) {
5254 if (FLAG_always_opt) return;
5255 CcTest::InitializeVM();
5256 Isolate* isolate = CcTest::i_isolate();
5257 Heap* heap = isolate->heap();
5258 v8::HandleScope scope(CcTest::isolate());
5259 CompileRun(
5260 "function loadIC(obj) {"
5261 " return obj.name;"
5262 "}"
5263 "function testIC() {"
5264 " var proto = {'name' : 'weak'};"
5265 " var obj = Object.create(proto);"
5266 " loadIC(obj);"
5267 " loadIC(obj);"
5268 " loadIC(obj);"
5269 " return proto;"
5270 "};");
5271 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5272 {
5273 v8::HandleScope scope(CcTest::isolate());
5274 CompileRun("(testIC())");
5275 }
5276 heap->CollectAllGarbage();
Ben Murdoch61f157c2016-09-16 13:49:30 +01005277 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005278 {
5279 v8::HandleScope scope(CcTest::isolate());
5280 CompileRun("(testIC())");
5281 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01005282 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005283}
5284
5285
5286TEST(PolymorphicStaysPolymorphicAfterGC) {
5287 if (FLAG_always_opt) return;
5288 CcTest::InitializeVM();
5289 Isolate* isolate = CcTest::i_isolate();
5290 Heap* heap = isolate->heap();
5291 v8::HandleScope scope(CcTest::isolate());
5292 CompileRun(
5293 "function loadIC(obj) {"
5294 " return obj.name;"
5295 "}"
5296 "function testIC() {"
5297 " var proto = {'name' : 'weak'};"
5298 " var obj = Object.create(proto);"
5299 " loadIC(obj);"
5300 " loadIC(obj);"
5301 " loadIC(obj);"
5302 " var poly = Object.create(proto);"
5303 " poly.x = true;"
5304 " loadIC(poly);"
5305 " return proto;"
5306 "};");
5307 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5308 {
5309 v8::HandleScope scope(CcTest::isolate());
5310 CompileRun("(testIC())");
5311 }
5312 heap->CollectAllGarbage();
Ben Murdoch61f157c2016-09-16 13:49:30 +01005313 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005314 {
5315 v8::HandleScope scope(CcTest::isolate());
5316 CompileRun("(testIC())");
5317 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01005318 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005319}
5320
5321
5322TEST(WeakCell) {
5323 CcTest::InitializeVM();
5324 Isolate* isolate = CcTest::i_isolate();
5325 v8::internal::Heap* heap = CcTest::heap();
5326 v8::internal::Factory* factory = isolate->factory();
5327
5328 HandleScope outer_scope(isolate);
5329 Handle<WeakCell> weak_cell1;
5330 {
5331 HandleScope inner_scope(isolate);
5332 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5333 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5334 }
5335
5336 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5337 Handle<WeakCell> weak_cell2;
5338 {
5339 HandleScope inner_scope(isolate);
5340 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5341 }
5342 CHECK(weak_cell1->value()->IsFixedArray());
5343 CHECK_EQ(*survivor, weak_cell2->value());
5344 heap->CollectGarbage(NEW_SPACE);
5345 CHECK(weak_cell1->value()->IsFixedArray());
5346 CHECK_EQ(*survivor, weak_cell2->value());
5347 heap->CollectGarbage(NEW_SPACE);
5348 CHECK(weak_cell1->value()->IsFixedArray());
5349 CHECK_EQ(*survivor, weak_cell2->value());
5350 heap->CollectAllAvailableGarbage();
5351 CHECK(weak_cell1->cleared());
5352 CHECK_EQ(*survivor, weak_cell2->value());
5353}
5354
5355
5356TEST(WeakCellsWithIncrementalMarking) {
5357 CcTest::InitializeVM();
5358 Isolate* isolate = CcTest::i_isolate();
5359 v8::internal::Heap* heap = CcTest::heap();
5360 v8::internal::Factory* factory = isolate->factory();
5361
5362 const int N = 16;
5363 HandleScope outer_scope(isolate);
5364 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5365 Handle<WeakCell> weak_cells[N];
5366
5367 for (int i = 0; i < N; i++) {
5368 HandleScope inner_scope(isolate);
5369 Handle<HeapObject> value =
5370 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5371 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5372 CHECK(weak_cell->value()->IsFixedArray());
5373 IncrementalMarking* marking = heap->incremental_marking();
5374 if (marking->IsStopped()) {
5375 heap->StartIncrementalMarking();
5376 }
5377 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5378 heap->CollectGarbage(NEW_SPACE);
5379 CHECK(weak_cell->value()->IsFixedArray());
5380 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5381 }
Ben Murdochda12d292016-06-02 14:46:10 +01005382 // Collect all garbage twice to make sure that we also clear
5383 // weak cells that were allocated on black pages.
5384 heap->CollectAllGarbage();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005385 heap->CollectAllGarbage();
5386 CHECK_EQ(*survivor, weak_cells[0]->value());
5387 for (int i = 1; i < N; i++) {
5388 CHECK(weak_cells[i]->cleared());
5389 }
5390}
5391
5392
5393#ifdef DEBUG
5394TEST(AddInstructionChangesNewSpacePromotion) {
5395 i::FLAG_allow_natives_syntax = true;
5396 i::FLAG_expose_gc = true;
5397 i::FLAG_stress_compaction = true;
5398 i::FLAG_gc_interval = 1000;
5399 CcTest::InitializeVM();
5400 if (!i::FLAG_allocation_site_pretenuring) return;
5401 v8::HandleScope scope(CcTest::isolate());
5402 Isolate* isolate = CcTest::i_isolate();
5403 Heap* heap = isolate->heap();
5404 LocalContext env;
5405 CompileRun(
5406 "function add(a, b) {"
5407 " return a + b;"
5408 "}"
5409 "add(1, 2);"
5410 "add(\"a\", \"b\");"
5411 "var oldSpaceObject;"
5412 "gc();"
5413 "function crash(x) {"
5414 " var object = {a: null, b: null};"
5415 " var result = add(1.5, x | 0);"
5416 " object.a = result;"
5417 " oldSpaceObject = object;"
5418 " return object;"
5419 "}"
5420 "crash(1);"
5421 "crash(1);"
5422 "%OptimizeFunctionOnNextCall(crash);"
5423 "crash(1);");
5424
5425 v8::Local<v8::Object> global = CcTest::global();
5426 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
5427 global->Get(env.local(), v8_str("crash")).ToLocalChecked());
5428 v8::Local<v8::Value> args1[] = {v8_num(1)};
5429 heap->DisableInlineAllocation();
5430 heap->set_allocation_timeout(1);
5431 g->Call(env.local(), global, 1, args1).ToLocalChecked();
5432 heap->CollectAllGarbage();
5433}
5434
5435
5436void OnFatalErrorExpectOOM(const char* location, const char* message) {
5437 // Exit with 0 if the location matches our expectation.
5438 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
5439}
5440
5441
5442TEST(CEntryStubOOM) {
5443 i::FLAG_allow_natives_syntax = true;
5444 CcTest::InitializeVM();
5445 v8::HandleScope scope(CcTest::isolate());
5446 CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);
5447
5448 v8::Local<v8::Value> result = CompileRun(
5449 "%SetFlags('--gc-interval=1');"
5450 "var a = [];"
5451 "a.__proto__ = [];"
5452 "a.unshift(1)");
5453
5454 CHECK(result->IsNumber());
5455}
5456
5457#endif // DEBUG
5458
5459
5460static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5461
5462
5463static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5464 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
5465}
5466
5467
5468UNINITIALIZED_TEST(Regress538257) {
5469 i::FLAG_manual_evacuation_candidates_selection = true;
5470 v8::Isolate::CreateParams create_params;
5471 // Set heap limits.
5472 create_params.constraints.set_max_semi_space_size(1 * Page::kPageSize / MB);
5473 create_params.constraints.set_max_old_space_size(6 * Page::kPageSize / MB);
5474 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5475 v8::Isolate* isolate = v8::Isolate::New(create_params);
5476 isolate->Enter();
5477 {
5478 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
Ben Murdochc5610432016-08-08 18:44:38 +01005479 Heap* heap = i_isolate->heap();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005480 HandleScope handle_scope(i_isolate);
Ben Murdochc5610432016-08-08 18:44:38 +01005481 PagedSpace* old_space = heap->old_space();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005482 const int kMaxObjects = 10000;
5483 const int kFixedArrayLen = 512;
5484 Handle<FixedArray> objects[kMaxObjects];
Ben Murdochc5610432016-08-08 18:44:38 +01005485 for (int i = 0; (i < kMaxObjects) &&
5486 heap->CanExpandOldGeneration(old_space->AreaSize());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005487 i++) {
5488 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
5489 Page::FromAddress(objects[i]->address())
5490 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
5491 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01005492 heap::SimulateFullSpace(old_space);
Ben Murdochc5610432016-08-08 18:44:38 +01005493 heap->CollectGarbage(OLD_SPACE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005494 // If we get this far, we've successfully aborted compaction. Any further
5495 // allocations might trigger OOM.
5496 }
5497 isolate->Exit();
5498 isolate->Dispose();
5499}
5500
5501
5502TEST(Regress357137) {
5503 CcTest::InitializeVM();
5504 v8::Isolate* isolate = CcTest::isolate();
5505 v8::HandleScope hscope(isolate);
5506 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5507 global->Set(
5508 v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
5509 .ToLocalChecked(),
5510 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5511 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5512 CHECK(!context.IsEmpty());
5513 v8::Context::Scope cscope(context);
5514
5515 v8::Local<v8::Value> result = CompileRun(
5516 "var locals = '';"
5517 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5518 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5519 "interrupt();" // This triggers a fake stack overflow in f.
5520 "f()()");
5521 CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
5522}
5523
5524
5525TEST(Regress507979) {
5526 const int kFixedArrayLen = 10;
5527 CcTest::InitializeVM();
5528 Isolate* isolate = CcTest::i_isolate();
5529 Heap* heap = isolate->heap();
5530 HandleScope handle_scope(isolate);
5531
5532 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5533 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005534 CHECK(heap->InNewSpace(*o1));
5535 CHECK(heap->InNewSpace(*o2));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005536
5537 HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);
5538
5539 // Replace parts of an object placed before a live object with a filler. This
5540 // way the filler object shares the mark bits with the following live object.
5541 o1->Shrink(kFixedArrayLen - 1);
5542
5543 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
5544 // Let's not optimize the loop away.
5545 CHECK(obj->address() != nullptr);
5546 }
5547}
5548
5549
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005550UNINITIALIZED_TEST(PromotionQueue) {
5551 i::FLAG_expose_gc = true;
Ben Murdoch61f157c2016-09-16 13:49:30 +01005552 i::FLAG_max_semi_space_size = 2 * Page::kPageSize / MB;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005553 i::FLAG_min_semi_space_size = i::FLAG_max_semi_space_size;
5554 v8::Isolate::CreateParams create_params;
5555 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5556 v8::Isolate* isolate = v8::Isolate::New(create_params);
5557 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5558 {
5559 v8::Isolate::Scope isolate_scope(isolate);
5560 v8::HandleScope handle_scope(isolate);
5561 v8::Context::New(isolate)->Enter();
5562 Heap* heap = i_isolate->heap();
5563 NewSpace* new_space = heap->new_space();
5564
5565 // In this test we will try to overwrite the promotion queue which is at the
5566 // end of to-space. To actually make that possible, we need at least two
5567 // semi-space pages and take advantage of fragmentation.
5568 // (1) Use a semi-space consisting of two pages.
5569 // (2) Create a few small long living objects and call the scavenger to
5570 // move them to the other semi-space.
5571 // (3) Create a huge object, i.e., remainder of first semi-space page and
5572 // create another huge object which should be of maximum allocatable memory
5573 // size of the second semi-space page.
5574 // (4) Call the scavenger again.
5575 // What will happen is: the scavenger will promote the objects created in
5576 // (2) and will create promotion queue entries at the end of the second
5577 // semi-space page during the next scavenge when it promotes the objects to
5578 // the old generation. The first allocation of (3) will fill up the first
5579 // semi-space page. The second allocation in (3) will not fit into the
5580 // first semi-space page, but it will overwrite the promotion queue entries,
5581 // which are in the second semi-space page. If the right guards are in place, the
5582 // promotion queue will be evacuated in that case.
5583
5584
5585 CHECK(new_space->IsAtMaximumCapacity());
5586 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5587
5588 // Call the scavenger two times to get an empty new space
5589 heap->CollectGarbage(NEW_SPACE);
5590 heap->CollectGarbage(NEW_SPACE);
5591
5592 // First create a few objects which will survive a scavenge, and will get
5593 // promoted to the old generation later on. These objects will create
5594 // promotion queue entries at the end of the second semi-space page.
5595 const int number_handles = 12;
5596 Handle<FixedArray> handles[number_handles];
5597 for (int i = 0; i < number_handles; i++) {
5598 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5599 }
5600
5601 heap->CollectGarbage(NEW_SPACE);
5602 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5603
5604 // Fill up the first semi-space page.
Ben Murdoch61f157c2016-09-16 13:49:30 +01005605 heap::FillUpOnePage(new_space);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005606
5607 // Create a small object to initialize the bump pointer on the second
5608 // semi-space page.
5609 Handle<FixedArray> small =
5610 i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5611 CHECK(heap->InNewSpace(*small));
5612
5613 // Fill up the second semi-space page.
Ben Murdoch61f157c2016-09-16 13:49:30 +01005614 heap::FillUpOnePage(new_space);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005615
5616 // This scavenge will corrupt memory if the promotion queue is not
5617 // evacuated.
5618 heap->CollectGarbage(NEW_SPACE);
5619 }
5620 isolate->Dispose();
5621}
5622
5623
5624TEST(Regress388880) {
5625 i::FLAG_expose_gc = true;
5626 CcTest::InitializeVM();
5627 v8::HandleScope scope(CcTest::isolate());
5628 Isolate* isolate = CcTest::i_isolate();
5629 Factory* factory = isolate->factory();
5630 Heap* heap = isolate->heap();
5631
5632 Handle<Map> map1 = Map::Create(isolate, 1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005633 Handle<String> name = factory->NewStringFromStaticChars("foo");
5634 name = factory->InternalizeString(name);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005635 Handle<Map> map2 =
Ben Murdoch097c5b22016-05-18 11:27:45 +01005636 Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE,
5637 Representation::Tagged(), OMIT_TRANSITION)
5638 .ToHandleChecked();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005639
5640 int desired_offset = Page::kPageSize - map1->instance_size();
5641
5642 // Allocate padding objects in old pointer space so that the object allocated
5643 // afterwards ends exactly at the end of the page.
Ben Murdoch61f157c2016-09-16 13:49:30 +01005644 heap::SimulateFullSpace(heap->old_space());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005645 int padding_size = desired_offset - Page::kObjectStartOffset;
Ben Murdoch61f157c2016-09-16 13:49:30 +01005646 heap::CreatePadding(heap, padding_size, TENURED);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005647
5648 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5649 o->set_properties(*factory->empty_fixed_array());
5650
5651 // Ensure that the object was allocated where we need it.
5652 Page* page = Page::FromAddress(o->address());
5653 CHECK_EQ(desired_offset, page->Offset(o->address()));
5654
5655 // Now we have an object right at the end of the page.
5656
5657 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5658 // that would cause a crash.
5659 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5660 marking->Stop();
5661 CcTest::heap()->StartIncrementalMarking();
5662 CHECK(marking->IsMarking());
5663
5664 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5665 // when it calls heap->AdjustLiveBytes(...).
5666 JSObject::MigrateToMap(o, map2);
5667}
5668
5669
5670TEST(Regress3631) {
5671 i::FLAG_expose_gc = true;
5672 CcTest::InitializeVM();
5673 v8::HandleScope scope(CcTest::isolate());
5674 Isolate* isolate = CcTest::i_isolate();
5675 Heap* heap = isolate->heap();
5676 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5677 v8::Local<v8::Value> result = CompileRun(
5678 "var weak_map = new WeakMap();"
5679 "var future_keys = [];"
5680 "for (var i = 0; i < 50; i++) {"
5681 " var key = {'k' : i + 0.1};"
5682 " weak_map.set(key, 1);"
5683 " future_keys.push({'x' : i + 0.2});"
5684 "}"
5685 "weak_map");
5686 if (marking->IsStopped()) {
5687 CcTest::heap()->StartIncrementalMarking();
5688 }
5689 // Incrementally mark the backing store.
5690 Handle<JSReceiver> obj =
5691 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5692 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5693 while (!Marking::IsBlack(
5694 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5695 !marking->IsStopped()) {
5696 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5697 }
5698 // Stash the backing store in a handle.
5699 Handle<Object> save(weak_map->table(), isolate);
5700 // The following line will update the backing store.
5701 CompileRun(
5702 "for (var i = 0; i < 50; i++) {"
5703 " weak_map.set(future_keys[i], i);"
5704 "}");
5705 heap->incremental_marking()->set_should_hurry(true);
5706 heap->CollectGarbage(OLD_SPACE);
5707}
5708
5709
5710TEST(Regress442710) {
5711 CcTest::InitializeVM();
5712 Isolate* isolate = CcTest::i_isolate();
5713 Heap* heap = isolate->heap();
5714 Factory* factory = isolate->factory();
5715
5716 HandleScope sc(isolate);
5717 Handle<JSGlobalObject> global(
5718 CcTest::i_isolate()->context()->global_object());
5719 Handle<JSArray> array = factory->NewJSArray(2);
5720
5721 Handle<String> name = factory->InternalizeUtf8String("testArray");
5722 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5723 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5724 heap->CollectGarbage(OLD_SPACE);
5725}
5726
5727
5728HEAP_TEST(NumberStringCacheSize) {
5729 // Test that the number-string cache has not been resized in the snapshot.
5730 CcTest::InitializeVM();
5731 Isolate* isolate = CcTest::i_isolate();
5732 if (!isolate->snapshot_available()) return;
5733 Heap* heap = isolate->heap();
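5733  // The cache stores (number, string) pairs, so the backing store is assumed
  // to be twice as long as the number of entries it can hold.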
5734 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5735 heap->number_string_cache()->length());
5736}
5737
5738
5739TEST(Regress3877) {
5740 CcTest::InitializeVM();
5741 Isolate* isolate = CcTest::i_isolate();
5742 Heap* heap = isolate->heap();
5743 Factory* factory = isolate->factory();
5744 HandleScope scope(isolate);
5745 CompileRun("function cls() { this.x = 10; }");
5746 Handle<WeakCell> weak_prototype;
5747 {
5748 HandleScope inner_scope(isolate);
5749 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5750 Handle<JSReceiver> proto =
5751 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5752 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5753 }
5754 CHECK(!weak_prototype->cleared());
5755 CompileRun(
5756 "var a = { };"
5757 "a.x = new cls();"
5758 "cls.prototype = null;");
5759 for (int i = 0; i < 4; i++) {
5760 heap->CollectAllGarbage();
5761 }
5762 // The map of a.x keeps the prototype alive.
5763 CHECK(!weak_prototype->cleared());
5764 // Change the map of a.x and make the previous map garbage collectable.
5765 CompileRun("a.x.__proto__ = {};");
5766 for (int i = 0; i < 4; i++) {
5767 heap->CollectAllGarbage();
5768 }
5769 CHECK(weak_prototype->cleared());
5770}
5771
5772
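// Creates a fresh map, points its prototype at a newly allocated object,
// registers the map as retained, and returns a WeakCell for the map.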
5773Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5774 HandleScope inner_scope(isolate);
5775 Handle<Map> map = Map::Create(isolate, 1);
5776 v8::Local<v8::Value> result =
5777 CompileRun("(function () { return {x : 10}; })();");
5778 Handle<JSReceiver> proto =
5779 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5780 Map::SetPrototype(map, proto);
5781 heap->AddRetainedMap(map);
5782 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
5783}
5784
5785
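// Verifies that a retained map survives |n| GC cycles with incremental
// marking and is cleared by the cycle after that.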
5786void CheckMapRetainingFor(int n) {
5787 FLAG_retain_maps_for_n_gc = n;
5788 Isolate* isolate = CcTest::i_isolate();
5789 Heap* heap = isolate->heap();
5790 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5791 CHECK(!weak_cell->cleared());
5792 for (int i = 0; i < n; i++) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01005793 heap::SimulateIncrementalMarking(heap);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005794 heap->CollectGarbage(OLD_SPACE);
5795 }
5796 CHECK(!weak_cell->cleared());
Ben Murdoch61f157c2016-09-16 13:49:30 +01005797 heap::SimulateIncrementalMarking(heap);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005798 heap->CollectGarbage(OLD_SPACE);
5799 CHECK(weak_cell->cleared());
5800}
5801
5802
5803TEST(MapRetaining) {
5804 CcTest::InitializeVM();
5805 v8::HandleScope scope(CcTest::isolate());
5806 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5807 CheckMapRetainingFor(0);
5808 CheckMapRetainingFor(1);
5809 CheckMapRetainingFor(7);
5810}
5811
5812
5813TEST(RegressArrayListGC) {
5814 FLAG_retain_maps_for_n_gc = 1;
5815 FLAG_incremental_marking = 0;
5816 FLAG_gc_global = true;
5817 CcTest::InitializeVM();
5818 v8::HandleScope scope(CcTest::isolate());
5819 Isolate* isolate = CcTest::i_isolate();
5820 Heap* heap = isolate->heap();
5821 AddRetainedMap(isolate, heap);
5822 Handle<Map> map = Map::Create(isolate, 1);
5823 heap->CollectGarbage(OLD_SPACE);
5824 // Force GC in old space on next addition of retained map.
5825 Map::WeakCellForMap(map);
Ben Murdoch61f157c2016-09-16 13:49:30 +01005826 heap::SimulateFullSpace(CcTest::heap()->new_space());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005827 for (int i = 0; i < 10; i++) {
5828 heap->AddRetainedMap(map);
5829 }
5830 heap->CollectGarbage(OLD_SPACE);
5831}
5832
5833
5834#ifdef DEBUG
5835TEST(PathTracer) {
5836 CcTest::InitializeVM();
5837 v8::HandleScope scope(CcTest::isolate());
5838
5839 v8::Local<v8::Value> result = CompileRun("'abc'");
5840 Handle<Object> o = v8::Utils::OpenHandle(*result);
5841 CcTest::i_isolate()->heap()->TracePathToObject(*o);
5842}
5843#endif // DEBUG
5844
5845
5846TEST(WritableVsImmortalRoots) {
5847 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5848 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5849 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5850 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5851 // A root value can be writable, immortal, or neither, but not both.
5852 CHECK(!immortal || !writable);
5853 }
5854}
5855
5856
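// Allocates a fixed typed array of |initial_length| elements, right-trims
// |elements_to_trim| elements, and checks that any free-space filler was
// placed correctly without smashing the array header.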
5857static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
5858 int initial_length,
5859 int elements_to_trim) {
5860 v8::HandleScope scope(CcTest::isolate());
5861 Isolate* isolate = CcTest::i_isolate();
5862 Factory* factory = isolate->factory();
5863 Heap* heap = isolate->heap();
5864
5865 Handle<FixedTypedArrayBase> array =
5866 factory->NewFixedTypedArray(initial_length, type, true);
5867 int old_size = array->size();
5868 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
5869 elements_to_trim);
5870
5871 // Check that the free-space filler is at the right place and did not smash
5872 // the array header.
5873 CHECK(array->IsFixedArrayBase());
5874 CHECK_EQ(initial_length - elements_to_trim, array->length());
5875 int new_size = array->size();
5876 if (new_size != old_size) {
5877 // Free space filler should be created in this case.
5878 Address next_obj_address = array->address() + array->size();
5879 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
5880 }
5881 heap->CollectAllAvailableGarbage();
5882}
5883
5884
5885TEST(Regress472513) {
5886 CcTest::InitializeVM();
5887 v8::HandleScope scope(CcTest::isolate());
5888
5889 // The combination of type/initial_length/elements_to_trim triggered
5890 // typed array header smashing with free space filler (crbug/472513).
5891
5892 // 64-bit cases.
5893 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
5894 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
5895 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
5896 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
5897 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
5898 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
5899
5900 // 32-bit cases.
5901 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
5902 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
5903 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
5904 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
5905 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
5906}
5907
5908
5909TEST(WeakFixedArray) {
5910 CcTest::InitializeVM();
5911 v8::HandleScope scope(CcTest::isolate());
5912
5913 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
5914 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
5915 array->Remove(number);
5916 array->Compact<WeakFixedArray::NullCallback>();
5917 WeakFixedArray::Add(array, number);
5918}
5919
5920
5921TEST(PreprocessStackTrace) {
5922 // Do not automatically trigger early GC.
5923 FLAG_gc_interval = -1;
5924 CcTest::InitializeVM();
5925 v8::HandleScope scope(CcTest::isolate());
5926 v8::TryCatch try_catch(CcTest::isolate());
5927 CompileRun("throw new Error();");
5928 CHECK(try_catch.HasCaught());
5929 Isolate* isolate = CcTest::i_isolate();
5930 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
5931 Handle<Name> key = isolate->factory()->stack_trace_symbol();
5932 Handle<Object> stack_trace =
Ben Murdochda12d292016-06-02 14:46:10 +01005933 Object::GetProperty(exception, key).ToHandleChecked();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005934 Handle<Object> code =
5935 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5936 CHECK(code->IsCode());
5937
5938 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
5939
5940 Handle<Object> pos =
5941 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5942 CHECK(pos->IsSmi());
5943
5944 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
5945 int array_length = Smi::cast(stack_trace_array->length())->value();
5946 for (int i = 0; i < array_length; i++) {
5947 Handle<Object> element =
5948 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
5949 CHECK(!element->IsCode());
5950 }
5951}
5952
5953
5954static bool utils_has_been_collected = false;
5955
5956static void UtilsHasBeenCollected(
5957 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5958 utils_has_been_collected = true;
5959 data.GetParameter()->Reset();
5960}
5961
5962
5963TEST(BootstrappingExports) {
5964 // Expose utils object and delete it to observe that it is indeed
5965 // being garbage-collected.
5966 FLAG_expose_natives_as = "utils";
5967 CcTest::InitializeVM();
5968 v8::Isolate* isolate = CcTest::isolate();
5969 LocalContext env;
5970
5971 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
5972
5973 utils_has_been_collected = false;
5974
5975 v8::Persistent<v8::Object> utils;
5976
5977 {
5978 v8::HandleScope scope(isolate);
5979 v8::Local<v8::String> name = v8_str("utils");
5980 utils.Reset(isolate, CcTest::global()
5981 ->Get(env.local(), name)
5982 .ToLocalChecked()
5983 ->ToObject(env.local())
5984 .ToLocalChecked());
5985 CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
5986 }
5987
5988 utils.SetWeak(&utils, UtilsHasBeenCollected,
5989 v8::WeakCallbackType::kParameter);
5990
5991 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
5992
5993 CHECK(utils_has_been_collected);
5994}
5995
5996
5997TEST(Regress1878) {
5998 FLAG_allow_natives_syntax = true;
5999 CcTest::InitializeVM();
6000 v8::Isolate* isolate = CcTest::isolate();
6001 v8::HandleScope scope(isolate);
6002 v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
6003 CcTest::i_isolate()->internal_array_function());
6004 LocalContext env;
6005 CHECK(CcTest::global()
6006 ->Set(env.local(), v8_str("InternalArray"), constructor)
6007 .FromJust());
6008
6009 v8::TryCatch try_catch(isolate);
6010
6011 CompileRun(
6012 "var a = Array();"
6013 "for (var i = 0; i < 1000; i++) {"
6014 " var ai = new InternalArray(10000);"
6015 " if (%HaveSameMap(ai, a)) throw Error();"
6016 " if (!%HasFastObjectElements(ai)) throw Error();"
6017 "}"
6018 "for (var i = 0; i < 1000; i++) {"
6019 " var ai = new InternalArray(10000);"
6020 " if (%HaveSameMap(ai, a)) throw Error();"
6021 " if (!%HasFastObjectElements(ai)) throw Error();"
6022 "}");
6023
6024 CHECK(!try_catch.HasCaught());
6025}
6026
6027
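// Allocates a FixedArray occupying exactly |bytes| bytes in the given space.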
6028void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6029 CHECK(bytes >= FixedArray::kHeaderSize);
6030 CHECK(bytes % kPointerSize == 0);
6031 Factory* factory = isolate->factory();
6032 HandleScope scope(isolate);
6033 AlwaysAllocateScope always_allocate(isolate);
6034 int elements =
6035 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6036 Handle<FixedArray> array = factory->NewFixedArray(
6037 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6038 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6039 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6040}
6041
6042
TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}


TEST(OldSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}

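// Throughput is derived from two allocation samples as
// (counter_new - counter_old) / (time_new - time_old). With the numbers used
// below, the checks expect (2000 - 1000) / (200 - 100) == 10 bytes/ms for the
// first sample pair and (30000 - 1000) / (1000 - 100) == 32 bytes/ms (integer
// division) for the second.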
TEST(NewSpaceAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  tracer->ResetForTesting();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}


TEST(NewSpaceAllocationThroughput2) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  tracer->ResetForTesting();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}

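// CheckLeak is exposed to JS as check(); it asserts that no message object is
// left behind in the isolate's pending-message slot once exception handling
// has completed: after a finished try/catch or try/finally the slot should
// hold the hole value again.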
static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  CHECK(message->IsTheHole(isolate));
}


TEST(MessageObjectLeak) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(
      v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, CheckLeak));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);

  const char* test =
      "try {"
      "  throw 'message 1';"
      "} catch (e) {"
      "}"
      "check();"
      "L: try {"
      "  throw 'message 2';"
      "} finally {"
      "  break L;"
      "}"
      "check();";
  CompileRun(test);

  const char* flag = "--turbo-filter=*";
  FlagList::SetFlagsFromString(flag, StrLength(flag));
  FLAG_always_opt = true;

  CompileRun(test);
}


static void CheckEqualSharedFunctionInfos(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
  Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
  Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
  Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
  CHECK(fun1->shared() == fun2->shared());
}

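// RemoveCodeAndGC simulates code flushing for the function passed in from JS:
// it points both the closure and its SharedFunctionInfo back at the
// CompileLazy builtin, drops any bytecode, and then triggers a full GC so the
// now-unreferenced code objects can be collected.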
static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  fun->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ClearBytecodeArray();  // Bytecode is code too.
  isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
}

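// After code flushing, lazily recompiling a function literal must reuse the
// canonical SharedFunctionInfo rather than minting a fresh one; otherwise
// closures created before and after the flush would disagree. Both shapes are
// exercised below: a directly returned function literal, and one produced
// inside an immediately-invoked function expression.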
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}

TEST(RemoveCodeFromSharedFunctionInfoButNotFromClosure) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "var g2 = f();"
      "check(g1, g2);"
      "g1();"
      "g2();"
      "remove(g1);"
      "g2();"
      "check(g1, g2);");
}

TEST(OldGenerationAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  tracer->ResetForTesting();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, 0, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, 0, counter2);
  size_t throughput = static_cast<size_t>(
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, 0, counter3);
  throughput = static_cast<size_t>(
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}


TEST(AllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  tracer->ResetForTesting();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, counter2);
  size_t throughput = static_cast<size_t>(
      tracer->AllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, counter3);
  throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
}

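// ContextMeasure walks the object graph reachable from a native context and
// reports an approximate size and object count. The whole-heap totals
// gathered first via HeapIterator give a strict upper bound, while the
// 1000-object / 50000-byte lower bounds are loose sanity limits for a freshly
// initialized context.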
TEST(ContextMeasure) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;

  int size_upper_limit = 0;
  int count_upper_limit = 0;
  HeapIterator it(CcTest::heap());
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    size_upper_limit += obj->Size();
    count_upper_limit++;
  }

  ContextMeasure measure(*isolate->native_context());

  PrintF("Context size        : %d bytes\n", measure.Size());
  PrintF("Context object count: %d\n", measure.Count());

  CHECK_LE(1000, measure.Count());
  CHECK_LE(50000, measure.Size());

  CHECK_LE(measure.Count(), count_upper_limit);
  CHECK_LE(measure.Size(), size_upper_limit);
}

TEST(ScriptIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  heap->CollectAllGarbage();

  int script_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (obj->IsScript()) script_count++;
    }
  }

  {
    Script::Iterator iterator(isolate);
    while (iterator.Next()) script_count--;
  }

  CHECK_EQ(0, script_count);
}


TEST(SharedFunctionInfoIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  heap->CollectAllGarbage();
  heap->CollectAllGarbage();

  int sfi_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (!obj->IsSharedFunctionInfo()) continue;
      sfi_count++;
    }
  }

  {
    SharedFunctionInfo::Iterator iterator(isolate);
    while (iterator.Next()) sfi_count--;
  }

  CHECK_EQ(0, sfi_count);
}

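// A UniqueId for the object-group API is derived here from the object's
// current address. This is presumably safe in this test because object groups
// only live until the next GC, i.e. before the object can move.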
template <typename T>
static UniqueId MakeUniqueId(const Persistent<T>& p) {
  return UniqueId(reinterpret_cast<uintptr_t>(*v8::Utils::OpenPersistent(p)));
}


TEST(Regress519319) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  Heap* heap = CcTest::heap();
  LocalContext context;

  v8::Persistent<Value> parent;
  v8::Persistent<Value> child;

  parent.Reset(isolate, v8::Object::New(isolate));
  child.Reset(isolate, v8::Object::New(isolate));

  heap::SimulateFullSpace(heap->old_space());
  heap->CollectGarbage(OLD_SPACE);
  {
    UniqueId id = MakeUniqueId(parent);
    isolate->SetObjectGroupId(parent, id);
    isolate->SetReferenceFromGroup(id, child);
  }
  // The CollectGarbage call above starts sweeper threads.
  // The crash will happen if the following two functions
  // are called before sweeping finishes.
  heap->StartIncrementalMarking();
  heap->FinalizeIncrementalMarkingIfComplete("test");
}

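// SampleAndGetJsCallsPerMs returns the API-call rate since the previous
// sample: 3 calls between t=0 and t=1 give 3 calls/ms, no calls between t=1
// and t=2 give 0, and 4 calls between t=2 and t=4 give 2 calls/ms.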
HEAP_TEST(TestMemoryReducerSampleJsCalls) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  MemoryReducer* memory_reducer = heap->memory_reducer_;
  memory_reducer->SampleAndGetJsCallsPerMs(0);
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
  CheckDoubleEquals(3, calls_per_ms);

  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
  CheckDoubleEquals(0, calls_per_ms);

  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
  CheckDoubleEquals(2, calls_per_ms);
}

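// Regress587004: right-trimming a tenured FixedArray while concurrent
// sweeping is in progress must install a filler that the sweeper can cope
// with. The test trims an almost page-sized array, waits for sweeping to
// finish, and then fills the freed memory with byte arrays to make any stale
// state visible.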
HEAP_TEST(Regress587004) {
  FLAG_concurrent_sweeping = false;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = false;
#endif
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  const int N = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
                kPointerSize;
  Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
  CHECK(heap->old_space()->Contains(*array));
  Handle<Object> number = factory->NewHeapNumber(1.0);
  CHECK(heap->InNewSpace(*number));
  for (int i = 0; i < N; i++) {
    array->set(i, *number);
  }
  heap->CollectGarbage(OLD_SPACE);
  heap::SimulateFullSpace(heap->old_space());
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  ByteArray* byte_array;
  const int M = 256;
  // Don't allow old space expansion. The test works without this flag too,
  // but becomes very slow.
  heap->set_force_oom(true);
  while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
  }
  // Re-enable old space expansion to avoid OOM crash.
  heap->set_force_oom(false);
  heap->CollectGarbage(NEW_SPACE);
}

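// Regress589413 builds a precise scenario: old space is filled with fixed
// arrays whose elements all point at an object on a forced evacuation
// candidate (so slots are recorded for them), the arrays are right-trimmed
// during simulated incremental marking, and finally a GC allocates from the
// free list. Stale recorded slots in the trimmed tails would crash here.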
HEAP_TEST(Regress589413) {
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  FLAG_concurrent_sweeping = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap in clean state.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Fill the new space with byte arrays with elements looking like pointers.
  const int M = 256;
  ByteArray* byte_array;
  while (heap->AllocateByteArray(M).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
    // Add the array to the root set.
    handle(byte_array);
  }
  // Make sure the byte arrays will be promoted on the next GC.
  heap->CollectGarbage(NEW_SPACE);
  // This number is close to the large free list category threshold.
  const int N = 0x3eee;
  {
    std::vector<FixedArray*> arrays;
    std::set<Page*> pages;
    FixedArray* array;
    // Fill all pages with fixed arrays.
    heap->set_force_oom(true);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array to the root set.
      handle(array);
    }
    // Expand and fill one complete page with fixed arrays.
    heap->set_force_oom(false);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array to the root set.
      handle(array);
      // Do not expand anymore.
      heap->set_force_oom(true);
    }
    // Expand and mark the new page as evacuation candidate.
    heap->set_force_oom(false);
    {
      AlwaysAllocateScope always_allocate(isolate);
      Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
      Page* ec_page = Page::FromAddress(ec_obj->address());
      ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
      // Make all arrays point to the evacuation candidate so that
      // slots are recorded for them.
      for (size_t j = 0; j < arrays.size(); j++) {
        array = arrays[j];
        for (int i = 0; i < N; i++) {
          array->set(i, *ec_obj);
        }
      }
    }
    heap::SimulateIncrementalMarking(heap);
    for (size_t j = 0; j < arrays.size(); j++) {
      heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
    }
  }
  // Force allocation from the free list.
  heap->set_force_oom(true);
  heap->CollectGarbage(OLD_SPACE);
}

TEST(Regress598319) {
  // This test ensures that no white objects can cross the progress bar of
  // large objects during incremental marking. It checks this by calling
  // Shift() on a large array while marking is only partially done, which
  // moves one white object across the progress bar.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  const int kNumberOfObjects = Page::kMaxRegularHeapObjectSize / kPointerSize;

  struct Arr {
    Arr(Isolate* isolate, int number_of_objects) {
      root = isolate->factory()->NewFixedArray(1, TENURED);
      {
        // Temporary scope to avoid getting any other objects into the root
        // set.
        v8::HandleScope scope(CcTest::isolate());
        Handle<FixedArray> tmp =
            isolate->factory()->NewFixedArray(number_of_objects);
        root->set(0, *tmp);
        for (int i = 0; i < get()->length(); i++) {
          tmp = isolate->factory()->NewFixedArray(100, TENURED);
          get()->set(i, *tmp);
        }
      }
    }

    FixedArray* get() { return FixedArray::cast(root->get(0)); }

    Handle<FixedArray> root;
  } arr(isolate, kNumberOfObjects);

  CHECK_EQ(arr.get()->length(), kNumberOfObjects);
  CHECK(heap->lo_space()->Contains(arr.get()));
  LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
  CHECK_NOT_NULL(page);

  // GC to clean up state.
  heap->CollectGarbage(OLD_SPACE);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  CHECK(heap->lo_space()->Contains(arr.get()));
  CHECK(Marking::IsWhite(Marking::MarkBitFrom(arr.get())));
  for (int i = 0; i < arr.get()->length(); i++) {
    CHECK(Marking::IsWhite(
        Marking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
  }

  // Start incremental marking.
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking();
  }
  CHECK(marking->IsMarking());

  // Check that we have not marked the interesting array during root scanning.
  for (int i = 0; i < arr.get()->length(); i++) {
    CHECK(Marking::IsWhite(
        Marking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
  }

  // Now we search for a state where we are in incremental marking and have
  // only partially marked the large object.
  while (!marking->IsComplete()) {
    marking->Step(i::KB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->progress_bar() > 0) {
      CHECK_NE(page->progress_bar(), arr.get()->Size());
      {
        // Shift by 1, effectively moving one white object across the progress
        // bar, meaning that we will miss marking it.
        v8::HandleScope scope(CcTest::isolate());
        Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
            Handle<FixedArray>(arr.get()));
        js_array->GetElementsAccessor()->Shift(js_array);
      }
      break;
    }
  }

  // Finish marking with bigger steps to speed up the test.
  while (!marking->IsComplete()) {
    marking->Step(10 * i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());

  // All objects need to be black after marking. If a white object crossed the
  // progress bar, we would fail here.
  for (int i = 0; i < arr.get()->length(); i++) {
    CHECK(Marking::IsBlack(
        Marking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
  }
}

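// Regress609761: Shrink() right-trims a FixedArray in place, so heap size
// accounting must see only the shrunken array as newly allocated. After
// shrinking the 200000-element array to length 1, the heap has grown by
// exactly the small array's Size().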
TEST(Regress609761) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  intptr_t size_before = heap->SizeOfObjects();
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
  array->Shrink(1);
  intptr_t size_after = heap->SizeOfObjects();
  CHECK_EQ(size_after, size_before + array->Size());
}

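// With black allocation, objects allocated while incremental marking is
// running are allocated black (already marked). Regress615489 checks that
// such objects are accounted consistently: completing the GC must not make
// the reported live size grow.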
TEST(Regress615489) {
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  heap->CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking();
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();
  {
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    v8::HandleScope inner(CcTest::isolate());
    isolate->factory()->NewFixedArray(500, TENURED)->Size();
  }
  while (!marking->IsComplete()) {
    marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
  intptr_t size_before = heap->SizeOfObjects();
  CcTest::heap()->CollectAllGarbage();
  intptr_t size_after = heap->SizeOfObjects();
  // Live size does not increase after garbage collection.
  CHECK_LE(size_after, size_before);
}

TEST(Regress618958) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  bool isolate_is_locked = true;
  heap->update_external_memory(100 * MB);
  int mark_sweep_count_before = heap->ms_count();
  heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                   isolate_is_locked);
  int mark_sweep_count_after = heap->ms_count();
  int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
  // The memory pressure handler either performed two GCs or performed one and
  // started incremental marking.
  CHECK(mark_sweeps_performed == 2 ||
        (mark_sweeps_performed == 1 &&
         !heap->incremental_marking()->IsStopped()));
}

}  // namespace internal
}  // namespace v8