// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>
#include <utility>

#include "src/compilation-cache.h"
#include "src/context-measure.h"
#include "src/deoptimizer.h"
#include "src/elements.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/field-type.h"
#include "src/global-handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/memory-reducer.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/regexp/jsregexp.h"
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/utils-inl.h"
#include "test/cctest/test-feedback-vector.h"


namespace v8 {
namespace internal {

static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}
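
// Added commentary (not from the original file): SIMD128_TYPES is an X-macro
// list, so the SIMD128_TYPE invocation above expands to one CheckMap call per
// SIMD type. For the float32x4 entry, the expansion looks roughly like:
//
//   CheckMap(heap->float32x4_map(), SIMD128_VALUE_TYPE, Float32x4::kSize);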


static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckSmi(Isolate* isolate, int value, const char* string) {
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Object::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != *code);
}
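
// Added commentary (not from the original file): the loop above probes every
// pointer-aligned address inside the Code object, so FindCodeObject must map
// interior pointers back to the start of the containing object. The second
// lookup deliberately probes the middle of a *different* Code object to check
// that the mapping does not bleed across objects.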


TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(static_cast<Object*>(nullptr), isolate);
  CHECK(!n.is_null());
}


TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // NaN oddball checks.
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs.
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis.
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers.
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}


template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
                           LANE_TYPE other_value) {
  // Check against lane_values, and check that all lanes can be set to
  // other_value without disturbing the other lanes.
  for (int i = 0; i < LANES; i++) {
    CHECK_EQ(lane_values[i], value->get_lane(i));
  }
  for (int i = 0; i < LANES; i++) {
    value->set_lane(i, other_value);  // change the value
    for (int j = 0; j < LANES; j++) {
      if (i != j)
        CHECK_EQ(lane_values[j], value->get_lane(j));
      else
        CHECK_EQ(other_value, value->get_lane(j));
    }
    value->set_lane(i, lane_values[i]);  // restore the lane
  }
  CHECK(value->BooleanValue());  // SIMD values are 'true'.
}


TEST(SimdObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  // Float32x4
  {
    float lanes[4] = {1, 2, 3, 4};
    float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
    float signaling_NaN = std::numeric_limits<float>::signaling_NaN();

    Handle<Float32x4> value = factory->NewFloat32x4(lanes);
    CHECK(value->IsFloat32x4());
    CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);

    // Check special lane values.
    value->set_lane(1, -0.0);
    CHECK_EQ(-0.0f, value->get_lane(1));
    CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
    value->set_lane(2, quiet_NaN);
    CHECK(std::isnan(value->get_lane(2)));
    value->set_lane(3, signaling_NaN);
    CHECK(std::isnan(value->get_lane(3)));

#ifdef OBJECT_PRINT
    // Check value printing.
    {
      value = factory->NewFloat32x4(lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      CHECK_EQ("1, 2, 3, 4", os.str());
    }
    {
      float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
      value = factory->NewFloat32x4(special_lanes);
      std::ostringstream os;
      value->Float32x4Print(os);
      // Value printing doesn't preserve signed zeroes.
      CHECK_EQ("0, 0, NaN, NaN", os.str());
    }
#endif  // OBJECT_PRINT
  }
  // Int32x4
  {
    int32_t lanes[4] = {1, 2, 3, 4};

    Handle<Int32x4> value = factory->NewInt32x4(lanes);
    CHECK(value->IsInt32x4());
    CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint32x4
  {
    uint32_t lanes[4] = {1, 2, 3, 4};

    Handle<Uint32x4> value = factory->NewUint32x4(lanes);
    CHECK(value->IsUint32x4());
    CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool32x4
  {
    bool lanes[4] = {true, false, true, false};

    Handle<Bool32x4> value = factory->NewBool32x4(lanes);
    CHECK(value->IsBool32x4());
    CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool32x4Print(os);
    CHECK_EQ("true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int16x8
  {
    int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Int16x8> value = factory->NewInt16x8(lanes);
    CHECK(value->IsInt16x8());
    CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint16x8
  {
    uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};

    Handle<Uint16x8> value = factory->NewUint16x8(lanes);
    CHECK(value->IsUint16x8());
    CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint16x8Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool16x8
  {
    bool lanes[8] = {true, false, true, false, true, false, true, false};

    Handle<Bool16x8> value = factory->NewBool16x8(lanes);
    CHECK(value->IsBool16x8());
    CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool16x8Print(os);
    CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
#endif  // OBJECT_PRINT
  }
  // Int8x16
  {
    int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Int8x16> value = factory->NewInt8x16(lanes);
    CHECK(value->IsInt8x16());
    CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Int8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Uint8x16
  {
    uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

    Handle<Uint8x16> value = factory->NewUint8x16(lanes);
    CHECK(value->IsUint8x16());
    CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Uint8x16Print(os);
    CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
#endif  // OBJECT_PRINT
  }
  // Bool8x16
  {
    bool lanes[16] = {true, false, true, false, true, false, true, false,
                      true, false, true, false, true, false, true, false};

    Handle<Bool8x16> value = factory->NewBool8x16(lanes);
    CHECK(value->IsBool8x16());
    CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);

#ifdef OBJECT_PRINT
    std::ostringstream os;
    value->Bool8x16Print(os);
    CHECK_EQ(
        "true, false, true, false, true, false, true, false, true, false, "
        "true, false, true, false, true, false",
        os.str());
#endif  // OBJECT_PRINT
  }
}


TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}
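
// Added commentary (not from the original file): the Tagging test leans on
// V8's pointer-tagging scheme, where a small integer ("Smi") is stored
// directly in a tagged word with its low tag bit clear, so it never needs a
// heap allocation. A rough 32-bit-style sketch, for illustration only:
//
//   intptr_t AsSmiWord(int value) { return static_cast<intptr_t>(value) << 1; }
//   bool LooksLikeSmi(intptr_t word) { return (word & 1) == 0; }
//
// The real encoding (tag size, shift, 64-bit layout) lives in the Smi class;
// the test only relies on FromInt/IsSmi round-tripping at the boundaries.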


TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After GC, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}
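
// Added commentary (not from the original file): the pattern above is the
// canonical way these tests distinguish rooted from unrooted objects. A
// handle created inside an inner HandleScope stops rooting its object once
// the scope closes, so only objects also reachable from the global object
// (or another root) are guaranteed to survive the subsequent CollectGarbage
// call.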


static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
}


TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}


TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // After GC, the global handles should still keep their targets alive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}


static bool WeakPointerCleared = false;

static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackInfo<void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}
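
// Added commentary (not from the original file): the callback above is the
// second half of the MakeWeak contract used by the tests below. Each test
// packs a (handle, id) pair into the parameter slot; when the GC finds the
// target otherwise unreachable it invokes the callback, which checks the id
// and resets the handle. The typical wiring, as used below, looks like:
//
//   std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
//   GlobalHandles::MakeWeak(h.location(),
//                           reinterpret_cast<void*>(&handle_and_id),
//                           &TestWeakGlobalHandleCallback,
//                           v8::WeakCallbackType::kParameter);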


TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}


TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage();

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}


TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback,
                          v8::WeakCallbackType::kParameter);

  // A scavenge does not recognize the weak reference.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak references properly.
  heap->CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}
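
// Added commentary (not from the original file): the three tests above pin
// down the same invariant from different angles: a scavenge (new-space GC)
// treats weak global handles as strong roots, so weak callbacks only fire
// once a full mark-compact collection proves the target unreachable. That is
// why DeleteWeakGlobalHandle expects WeakPointerCleared to flip only after
// the OLD_SPACE collection.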


TEST(BytecodeArray) {
  static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
  static const int kRawBytesSize = sizeof(kRawBytes);
  static const int kFrameSize = 32;
  static const int kParameterCount = 2;

  i::FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);

  SimulateFullSpace(heap->old_space());
  Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
  for (int i = 0; i < 5; i++) {
    Handle<Object> number = factory->NewHeapNumber(i);
    constant_pool->set(i, *number);
  }

  // Allocate and initialize BytecodeArray.
  Handle<BytecodeArray> array = factory->NewBytecodeArray(
      kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);

  CHECK(array->IsBytecodeArray());
  CHECK_EQ(array->length(), kRawBytesSize);
  CHECK_EQ(array->frame_size(), kFrameSize);
  CHECK_EQ(array->parameter_count(), kParameterCount);
  CHECK_EQ(array->constant_pool(), *constant_pool);
  CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
  CHECK_GE(array->address() + array->BytecodeArraySize(),
           array->GetFirstBytecodeAddress() + array->length());
  for (int i = 0; i < kRawBytesSize; i++) {
    CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
    CHECK_EQ(array->get(i), kRawBytes[i]);
  }

  FixedArray* old_constant_pool_address = *constant_pool;

  // Perform a full garbage collection and force the constant pool to be on an
  // evacuation candidate.
  Page* evac_page = Page::FromAddress(constant_pool->address());
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap->CollectAllGarbage();

  // BytecodeArray should survive.
  CHECK_EQ(array->length(), kRawBytesSize);
  CHECK_EQ(array->frame_size(), kFrameSize);
  for (int i = 0; i < kRawBytesSize; i++) {
    CHECK_EQ(array->get(i), kRawBytes[i]);
    CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
  }

  // Constant pool should have been migrated.
  CHECK_EQ(array->constant_pool(), *constant_pool);
  CHECK_NE(array->constant_pool(), old_constant_pool_address);
}
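
// Added commentary (not from the original file): forcing
// FORCE_EVACUATION_CANDIDATE_FOR_TESTING on the constant pool's page is the
// test's way of guaranteeing that the pool actually moves during the full GC.
// The final CHECK_NE then proves two things at once: the object was
// relocated, and the BytecodeArray's constant_pool slot was updated to the
// new location (i.e. pointers into evacuated pages get fixed up).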


static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};


static void CheckInternalizedStrings(const char** strings) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    HandleScope scope(isolate);
    Handle<String> a =
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
    CHECK(a->IsInternalizedString());
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
  }
}


TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}


TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}


TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Check for empty.
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // Add first.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // Delete first.
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // Add first and then second.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // Delete first and then second.
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // Add first and then second.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // Delete second and then first.
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // Check that a plain string and the matching internalized string are
  // interchangeable as property keys.
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // Check the other direction: an internalized string key is found via the
  // matching plain string.
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}


TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property.
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check that the map has changed.
  CHECK(*initial_map != obj->map());
}


TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set the array length to a value larger than any Smi.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}
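
// Added commentary (not from the original file): the transition asserted
// above is the usual fast-to-slow elements story. While indices stay small
// and dense, elements live in a flat backing store (fast mode); bumping the
// length past Smi::kMaxValue makes a flat store impractical, so the array
// switches to a dictionary (hash table) backing store, which is what
// HasDictionaryElements() verifies.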


TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
  JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
  JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();

  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}


TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = {0xe5, 0xa4, 0xa7};
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}
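
// Added commentary (not from the original file): the byte triple
// {0xe5, 0xa4, 0xa7} is the UTF-8 encoding of a single code point (U+5927),
// so every three input bytes decode to one string character. That is why the
// test builds a buffer of 3 * length bytes but still expects length()
// == length: String::length() counts characters, not the UTF-8 bytes they
// were decoded from.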


static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan the heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE.
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE.
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}


UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache.
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}


TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GCs now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo should no longer be in the compilation cache.
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  EnableDebugger(CcTest::isolate());
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
  DisableDebugger(CcTest::isolate());

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}

TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate->global_object(), f_name).ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  CHECK(!g_function->is_compiled());

  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001561
1562TEST(CompilationCacheCachingBehavior) {
1563 // If we do not flush code, or have the compilation cache turned off, this
1564 // test is invalid.
1565 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1566 return;
1567 }
1568 CcTest::InitializeVM();
1569 Isolate* isolate = CcTest::i_isolate();
1570 Factory* factory = isolate->factory();
1571 Heap* heap = isolate->heap();
1572 CompilationCache* compilation_cache = isolate->compilation_cache();
Ben Murdochda12d292016-06-02 14:46:10 +01001573 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001574
1575 v8::HandleScope scope(CcTest::isolate());
1576 const char* raw_source =
1577 "function foo() {"
1578 " var x = 42;"
1579 " var y = 42;"
1580 " var z = x + y;"
1581 "};"
1582 "foo()";
1583 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1584 Handle<Context> native_context = isolate->native_context();
1585
1586 {
1587 v8::HandleScope scope(CcTest::isolate());
1588 CompileRun(raw_source);
1589 }
1590
1591 // The script should be in the cache now.
1592 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1593 source, Handle<Object>(), 0, 0,
1594 v8::ScriptOriginOptions(false, true, false), native_context,
1595 language_mode);
1596 CHECK(!info.is_null());
1597
1598 // Check that the code cache entry survives at least one GC.
1599 // (Unless --optimize-for-size, in which case it might get collected
1600 // immediately.)
1601 if (!FLAG_optimize_for_size) {
1602 heap->CollectAllGarbage();
1603 info = compilation_cache->LookupScript(
1604 source, Handle<Object>(), 0, 0,
1605 v8::ScriptOriginOptions(false, true, false), native_context,
1606 language_mode);
1607 CHECK(!info.is_null());
1608 }
1609
1610 // Advance the code age until it's old and ready for GC.
1611 while (!info.ToHandleChecked()->code()->IsOld()) {
1612 // To guarantee progress, we have to MakeOlder with different parities.
1613 // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
1614 // always NO_MARKING_PARITY and the code age only progresses if the parity
1615 // is different.
1616 info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
1617 info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
1618 }
1619
1620 heap->CollectAllGarbage();
1621 // Ensure code aging cleared the entry from the cache.
1622 info = compilation_cache->LookupScript(
1623 source, Handle<Object>(), 0, 0,
1624 v8::ScriptOriginOptions(false, true, false), native_context,
1625 language_mode);
1626 CHECK(info.is_null());
1627}
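
// A helper capturing the aging idiom used above, as an illustration only
// (not part of the test suite). It assumes the same Code interface
// (IsOld, MakeOlder) that the test exercises, and alternates parities so
// the age advances even from ages that are always NO_MARKING_PARITY.
static void SimulateCodeAging(Code* code) {
  while (!code->IsOld()) {
    code->MakeOlder(ODD_MARKING_PARITY);
    code->MakeOlder(EVEN_MARKING_PARITY);
  }
}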
1628
1629
1630static void OptimizeEmptyFunction(const char* name) {
1631 HandleScope scope(CcTest::i_isolate());
1632 EmbeddedVector<char, 256> source;
1633 SNPrintF(source,
1634 "function %s() { return 0; }"
1635 "%s(); %s();"
1636 "%%OptimizeFunctionOnNextCall(%s);"
1637 "%s();",
1638 name, name, name, name, name);
1639 CompileRun(source.start());
1640}
1641
1642
1643// Count the number of native contexts in the weak list of native contexts.
1644int CountNativeContexts() {
1645 int count = 0;
1646 Object* object = CcTest::heap()->native_contexts_list();
1647 while (!object->IsUndefined()) {
1648 count++;
1649 object = Context::cast(object)->next_context_link();
1650 }
1651 return count;
1652}
1653
1654
1655// Count the number of user functions in the weak list of optimized
1656// functions attached to a native context.
1657static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
1658 int count = 0;
1659 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1660 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1661 while (object->IsJSFunction() &&
1662 !JSFunction::cast(object)->shared()->IsBuiltin()) {
1663 count++;
1664 object = JSFunction::cast(object)->next_function_link();
1665 }
1666 return count;
1667}
1668
1669
1670TEST(TestInternalWeakLists) {
1671 FLAG_always_opt = false;
1672 FLAG_allow_natives_syntax = true;
1673 v8::V8::Initialize();
1674
1675 // Some flags turn Scavenge collections into Mark-sweep collections
1676 // and hence are incompatible with this test case.
1677 if (FLAG_gc_global || FLAG_stress_compaction) return;
1678 FLAG_retain_maps_for_n_gc = 0;
1679
1680 static const int kNumTestContexts = 10;
1681
1682 Isolate* isolate = CcTest::i_isolate();
1683 Heap* heap = isolate->heap();
1684 HandleScope scope(isolate);
1685 v8::Local<v8::Context> ctx[kNumTestContexts];
1686 if (!isolate->use_crankshaft()) return;
1687
1688 CHECK_EQ(0, CountNativeContexts());
1689
1690 // Create a number of global contexts which get linked together.
1691 for (int i = 0; i < kNumTestContexts; i++) {
1692 ctx[i] = v8::Context::New(CcTest::isolate());
1693
1694 // Collect garbage that might have been created by one of the
1695 // installed extensions.
1696 isolate->compilation_cache()->Clear();
1697 heap->CollectAllGarbage();
1698
1699 CHECK_EQ(i + 1, CountNativeContexts());
1700
1701 ctx[i]->Enter();
1702
1703 // Create a handle scope so no function objects get stuck in the outer
1704 // handle scope.
1705 HandleScope scope(isolate);
1706 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1707 OptimizeEmptyFunction("f1");
1708 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1709 OptimizeEmptyFunction("f2");
1710 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1711 OptimizeEmptyFunction("f3");
1712 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1713 OptimizeEmptyFunction("f4");
1714 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1715 OptimizeEmptyFunction("f5");
1716 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1717
1718 // Remove function f1.
1719 CompileRun("f1=null");
1720
1721 // Scavenge treats these references as strong.
1722 for (int j = 0; j < 10; j++) {
1723 CcTest::heap()->CollectGarbage(NEW_SPACE);
1724 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1725 }
1726
1727 // Mark compact handles the weak references.
1728 isolate->compilation_cache()->Clear();
1729 heap->CollectAllGarbage();
1730 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1731
1732 // Get rid of f3 and f5 in the same way.
1733 CompileRun("f3=null");
1734 for (int j = 0; j < 10; j++) {
1735 CcTest::heap()->CollectGarbage(NEW_SPACE);
1736 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1737 }
1738 CcTest::heap()->CollectAllGarbage();
1739 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1740 CompileRun("f5=null");
1741 for (int j = 0; j < 10; j++) {
1742 CcTest::heap()->CollectGarbage(NEW_SPACE);
1743 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1744 }
1745 CcTest::heap()->CollectAllGarbage();
1746 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1747
1748 ctx[i]->Exit();
1749 }
1750
1751 // Force compilation cache cleanup.
1752 CcTest::heap()->NotifyContextDisposed(true);
1753 CcTest::heap()->CollectAllGarbage();
1754
1755 // Dispose the native contexts one by one.
1756 for (int i = 0; i < kNumTestContexts; i++) {
1757 // TODO(dcarney): is there a better way to do this?
1758 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1759 *unsafe = CcTest::heap()->undefined_value();
1760 ctx[i].Clear();
1761
1762 // Scavenge treats these references as strong.
1763 for (int j = 0; j < 10; j++) {
1764 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1765 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1766 }
1767
1768 // Mark compact handles the weak references.
1769 CcTest::heap()->CollectAllGarbage();
1770 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1771 }
1772
1773 CHECK_EQ(0, CountNativeContexts());
1774}
1775
1776
1777// Count the number of native contexts in the weak list of native contexts
1778// causing a GC after the specified number of elements.
1779static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1780 Heap* heap = isolate->heap();
1781 int count = 0;
1782 Handle<Object> object(heap->native_contexts_list(), isolate);
1783 while (!object->IsUndefined()) {
1784 count++;
1785 if (count == n) heap->CollectAllGarbage();
1786 object =
1787 Handle<Object>(Context::cast(*object)->next_context_link(), isolate);
1788 }
1789 return count;
1790}
1791
1792
1793// Count the number of user functions in the weak list of optimized
1794// functions attached to a native context causing a GC after the
1795// specified number of elements.
1796static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
1797 int n) {
1798 int count = 0;
1799 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1800 Isolate* isolate = icontext->GetIsolate();
1801 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1802 isolate);
1803 while (object->IsJSFunction() &&
1804 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
1805 count++;
1806 if (count == n) isolate->heap()->CollectAllGarbage();
1807 object = Handle<Object>(
1808 Object::cast(JSFunction::cast(*object)->next_function_link()),
1809 isolate);
1810 }
1811 return count;
1812}
1813
1814
1815TEST(TestInternalWeakListsTraverseWithGC) {
1816 FLAG_always_opt = false;
1817 FLAG_allow_natives_syntax = true;
1818 v8::V8::Initialize();
1819
1820 static const int kNumTestContexts = 10;
1821
1822 Isolate* isolate = CcTest::i_isolate();
1823 HandleScope scope(isolate);
1824 v8::Local<v8::Context> ctx[kNumTestContexts];
1825 if (!isolate->use_crankshaft()) return;
1826
1827 CHECK_EQ(0, CountNativeContexts());
1828
1829 // Create a number of contexts and check the length of the weak list both
1830 // with and without GCs while iterating the list.
1831 for (int i = 0; i < kNumTestContexts; i++) {
1832 ctx[i] = v8::Context::New(CcTest::isolate());
1833 CHECK_EQ(i + 1, CountNativeContexts());
1834 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1835 }
1836
1837 ctx[0]->Enter();
1838
1839 // Compile a number of functions and check the length of the weak list of
1840 // optimized functions, both with and without GCs while iterating the list.
1841 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1842 OptimizeEmptyFunction("f1");
1843 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
1844 CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1845 OptimizeEmptyFunction("f2");
1846 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
1847 CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1848 OptimizeEmptyFunction("f3");
1849 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
1850 CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1851 OptimizeEmptyFunction("f4");
1852 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
1853 CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1854 OptimizeEmptyFunction("f5");
1855 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
1856 CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1857
1858 ctx[0]->Exit();
1859}
1860
1861
1862TEST(TestSizeOfRegExpCode) {
1863 if (!FLAG_regexp_optimization) return;
1864
1865 v8::V8::Initialize();
1866
1867 Isolate* isolate = CcTest::i_isolate();
1868 HandleScope scope(isolate);
1869
1870 LocalContext context;
1871
1872 // Adjust source below and this check to match
1873 // RegExpImpl::kRegExpTooLargeToOptimize.
1874 CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1875
1876 // Compile a regexp that is much larger if we are using regexp optimizations.
1877 CompileRun(
1878 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1879 "var half_size_reg_exp;"
1880 "while (reg_exp_source.length < 20 * 1024) {"
1881 " half_size_reg_exp = reg_exp_source;"
1882 " reg_exp_source = reg_exp_source + reg_exp_source;"
1883 "}"
1884 // Flatten string.
1885 "reg_exp_source.match(/f/);");
1886
1887 // Get initial heap size after several full GCs, which will stabilize
1888 // the heap size and return with sweeping finished completely.
1889 CcTest::heap()->CollectAllGarbage();
1890 CcTest::heap()->CollectAllGarbage();
1891 CcTest::heap()->CollectAllGarbage();
1892 CcTest::heap()->CollectAllGarbage();
1893 CcTest::heap()->CollectAllGarbage();
1894 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1895 if (collector->sweeping_in_progress()) {
1896 collector->EnsureSweepingCompleted();
1897 }
1898 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1899
1900 CompileRun("'foo'.match(reg_exp_source);");
1901 CcTest::heap()->CollectAllGarbage();
1902 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1903
1904 CompileRun("'foo'.match(half_size_reg_exp);");
1905 CcTest::heap()->CollectAllGarbage();
1906 int size_with_optimized_regexp =
1907 static_cast<int>(CcTest::heap()->SizeOfObjects());
1908
1909 int size_of_regexp_code = size_with_regexp - initial_size;
1910
1911 // On some platforms the debug-code flag causes huge amounts of regexp code
1912 // to be emitted, breaking this test.
1913 if (!FLAG_debug_code) {
1914 CHECK_LE(size_of_regexp_code, 1 * MB);
1915 }
1916
1917 // Small regexp is half the size, but compiles to more than twice the code
1918 // due to the optimization steps.
1919 CHECK_GE(size_with_optimized_regexp,
1920 size_with_regexp + size_of_regexp_code * 2);
1921}
1922
1923
1924HEAP_TEST(TestSizeOfObjects) {
1925 v8::V8::Initialize();
1926
1927 // Get initial heap size after several full GCs, which will stabilize
1928 // the heap size and return with sweeping finished completely.
1929 CcTest::heap()->CollectAllGarbage();
1930 CcTest::heap()->CollectAllGarbage();
1931 CcTest::heap()->CollectAllGarbage();
1932 CcTest::heap()->CollectAllGarbage();
1933 CcTest::heap()->CollectAllGarbage();
1934 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1935 if (collector->sweeping_in_progress()) {
1936 collector->EnsureSweepingCompleted();
1937 }
1938 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1939
1940 {
1941 // Allocate objects on several different old-space pages so that
1942 // concurrent sweeper threads will be busy sweeping the old space on
1943 // subsequent GC runs.
1944 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1945 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1946 for (int i = 1; i <= 100; i++) {
1947 CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1948 CHECK_EQ(initial_size + i * filler_size,
1949 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1950 }
1951 }
1952
1953 // The heap size should go back to initial size after a full GC, even
1954 // though sweeping didn't finish yet.
1955 CcTest::heap()->CollectAllGarbage();
1956
1957 // Normally sweeping would not be complete here, but no guarantees.
1958
1959 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1960
1961 // Waiting for sweeper threads should not change heap size.
1962 if (collector->sweeping_in_progress()) {
1963 collector->EnsureSweepingCompleted();
1964 }
1965 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1966}
1967
1968
1969TEST(TestAlignmentCalculations) {
1970 // Maximum fill amounts are consistent.
1971 int maximum_double_misalignment = kDoubleSize - kPointerSize;
1972 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
1973 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1974 CHECK_EQ(0, max_word_fill);
1975 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1976 CHECK_EQ(maximum_double_misalignment, max_double_fill);
1977 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1978 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1979 int max_simd128_unaligned_fill =
1980 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
1981 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
1982
1983 Address base = static_cast<Address>(NULL);
1984 int fill = 0;
1985
1986 // Word alignment never requires fill.
1987 fill = Heap::GetFillToAlign(base, kWordAligned);
1988 CHECK_EQ(0, fill);
1989 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
1990 CHECK_EQ(0, fill);
1991
1992 // No fill is required when address is double aligned.
1993 fill = Heap::GetFillToAlign(base, kDoubleAligned);
1994 CHECK_EQ(0, fill);
1995 // Fill is required if address is not double aligned.
1996 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
1997 CHECK_EQ(maximum_double_misalignment, fill);
1998 // kDoubleUnaligned has the opposite fill amounts.
1999 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
2000 CHECK_EQ(maximum_double_misalignment, fill);
2001 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
2002 CHECK_EQ(0, fill);
2003
2004 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
2005 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
2006 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
2007 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
2008 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
2009 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
2010 CHECK_EQ(kPointerSize, fill);
2011 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
2012 CHECK_EQ(0, fill);
2013}
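
// For reference, the fill amounts checked above follow from modular
// arithmetic. A hypothetical helper (not the Heap API) that computes them;
// header_offset is 0 for the aligned requirements and kPointerSize for the
// *Unaligned ones, which align the payload following a one-word header
// rather than the object start.
static int FillToAlignSketch(uintptr_t addr, uintptr_t alignment,
                             uintptr_t header_offset) {
  // Smallest fill such that (addr + fill + header_offset) % alignment == 0.
  return static_cast<int>(
      (alignment - (addr + header_offset) % alignment) % alignment);
}
// E.g. FillToAlignSketch(base + kPointerSize, kDoubleAlignment, 0) yields
// kDoubleSize - kPointerSize on 32-bit targets, matching the CHECK above.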
2014
2015
2016static HeapObject* NewSpaceAllocateAligned(int size,
2017 AllocationAlignment alignment) {
2018 Heap* heap = CcTest::heap();
2019 AllocationResult allocation =
2020 heap->new_space()->AllocateRawAligned(size, alignment);
2021 HeapObject* obj = NULL;
2022 allocation.To(&obj);
2023 heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
2024 return obj;
2025}
2026
2027
2028// Get new space allocation into the desired alignment.
2029static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2030 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2031 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2032 if (fill) {
2033 NewSpaceAllocateAligned(fill + offset, kWordAligned);
2034 }
2035 return *top_addr;
2036}
2037
2038
2039TEST(TestAlignedAllocation) {
2040 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2041 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2042 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2043 Address start;
2044 HeapObject* obj;
2045 HeapObject* filler;
2046 if (double_misalignment) {
2047 // Allocate a pointer sized object that must be double aligned at an
2048 // aligned address.
2049 start = AlignNewSpace(kDoubleAligned, 0);
2050 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2051 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2052 // There is no filler.
2053 CHECK_EQ(kPointerSize, *top_addr - start);
2054
2055 // Allocate a second pointer sized object that must be double aligned at an
2056 // unaligned address.
2057 start = AlignNewSpace(kDoubleAligned, kPointerSize);
2058 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2059 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2060 // There is a filler object before the object.
2061 filler = HeapObject::FromAddress(start);
2062 CHECK(obj != filler && filler->IsFiller() &&
2063 filler->Size() == kPointerSize);
2064 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2065
2066 // Similarly for kDoubleUnaligned.
2067 start = AlignNewSpace(kDoubleUnaligned, 0);
2068 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2069 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2070 CHECK_EQ(kPointerSize, *top_addr - start);
2071 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
2072 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2073 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2074 // There is a filler object before the object.
2075 filler = HeapObject::FromAddress(start);
2076 CHECK(obj != filler && filler->IsFiller() &&
2077 filler->Size() == kPointerSize);
2078 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2079 }
2080
2081 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2082 // on platform.
2083 start = AlignNewSpace(kSimd128Unaligned, 0);
2084 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2085 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2086 // There is no filler.
2087 CHECK_EQ(kPointerSize, *top_addr - start);
2088 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
2089 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2090 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2091 // There is a filler object before the object.
2092 filler = HeapObject::FromAddress(start);
2093 CHECK(obj != filler && filler->IsFiller() &&
2094 filler->Size() == kSimd128Size - kPointerSize);
2095 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
2096
2097 if (double_misalignment) {
2098 // Test the 2 other alignments possible on 32 bit platforms.
2099 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
2100 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2101 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2102 // There is a filler object before the object.
2103 filler = HeapObject::FromAddress(start);
2104 CHECK(obj != filler && filler->IsFiller() &&
2105 filler->Size() == 2 * kPointerSize);
2106 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
2107 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
2108 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2109 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2110 // There is a filler object before the object.
2111 filler = HeapObject::FromAddress(start);
2112 CHECK(obj != filler && filler->IsFiller() &&
2113 filler->Size() == kPointerSize);
2114 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
2115 }
2116}
2117
2118
2119static HeapObject* OldSpaceAllocateAligned(int size,
2120 AllocationAlignment alignment) {
2121 Heap* heap = CcTest::heap();
2122 AllocationResult allocation =
2123 heap->old_space()->AllocateRawAligned(size, alignment);
2124 HeapObject* obj = NULL;
2125 allocation.To(&obj);
2126 heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
2127 return obj;
2128}
2129
2130
2131// Get old space allocation into the desired alignment.
2132static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2133 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2134 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2135 int allocation = fill + offset;
2136 if (allocation) {
2137 OldSpaceAllocateAligned(allocation, kWordAligned);
2138 }
2139 Address top = *top_addr;
2140 // Now force the remaining allocation onto the free list.
2141 CcTest::heap()->old_space()->EmptyAllocationInfo();
2142 return top;
2143}
2144
2145
2146// Test the case where allocation must be done from the free list, so filler
2147// may precede or follow the object.
2148TEST(TestAlignedOverAllocation) {
2149 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2150 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2151 Address start;
2152 HeapObject* obj;
2153 HeapObject* filler1;
2154 HeapObject* filler2;
2155 if (double_misalignment) {
2156 start = AlignOldSpace(kDoubleAligned, 0);
2157 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2158 // The object is aligned, and a filler object is created after.
2159 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2160 filler1 = HeapObject::FromAddress(start + kPointerSize);
2161 CHECK(obj != filler1 && filler1->IsFiller() &&
2162 filler1->Size() == kPointerSize);
2163 // Try the opposite alignment case.
2164 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2165 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2166 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2167 filler1 = HeapObject::FromAddress(start);
2171 CHECK(obj != filler1 && filler1->IsFiller() &&
2172 filler1->Size() == kPointerSize);
2173
2174 // Similarly for kDoubleUnaligned.
2175 start = AlignOldSpace(kDoubleUnaligned, 0);
2176 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2177 // The object is aligned, and a filler object is created after.
2178 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2179 filler1 = HeapObject::FromAddress(start + kPointerSize);
2180 CHECK(obj != filler1 && filler1->IsFiller() &&
2181 filler1->Size() == kPointerSize);
2182 // Try the opposite alignment case.
2183 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2184 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2185 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2186 filler1 = HeapObject::FromAddress(start);
2187 CHECK(obj != filler1 && filler1->IsFiller() &&
2188 filler1->Size() == kPointerSize);
2189 }
2190
2191 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2192 // on platform.
2193 start = AlignOldSpace(kSimd128Unaligned, 0);
2194 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2195 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2196 // There is a filler object after the object.
2197 filler1 = HeapObject::FromAddress(start + kPointerSize);
2198 CHECK(obj != filler1 && filler1->IsFiller() &&
2199 filler1->Size() == kSimd128Size - kPointerSize);
2200 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2201 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2202 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2203 // There is a filler object before the object.
2204 filler1 = HeapObject::FromAddress(start);
2205 CHECK(obj != filler1 && filler1->IsFiller() &&
2206 filler1->Size() == kSimd128Size - kPointerSize);
2207
2208 if (double_misalignment) {
2209 // Test the 2 other alignments possible on 32 bit platforms.
2210 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2211 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2212 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2213 // There are filler objects before and after the object.
2214 filler1 = HeapObject::FromAddress(start);
2215 CHECK(obj != filler1 && filler1->IsFiller() &&
2216 filler1->Size() == 2 * kPointerSize);
2217 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2218 CHECK(obj != filler2 && filler2->IsFiller() &&
2219 filler2->Size() == kPointerSize);
2220 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2221 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2222 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2223 // There are filler objects before and after the object.
2224 filler1 = HeapObject::FromAddress(start);
2225 CHECK(obj != filler1 && filler1->IsFiller() &&
2226 filler1->Size() == kPointerSize);
2227 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2228 CHECK(obj != filler2 && filler2->IsFiller() &&
2229 filler2->Size() == 2 * kPointerSize);
2230 }
2231}
2232
2233
2234TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2235 CcTest::InitializeVM();
2236 HeapIterator iterator(CcTest::heap());
2237 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2238 intptr_t size_of_objects_2 = 0;
2239 for (HeapObject* obj = iterator.next();
2240 obj != NULL;
2241 obj = iterator.next()) {
2242 if (!obj->IsFreeSpace()) {
2243 size_of_objects_2 += obj->Size();
2244 }
2245 }
2246 // Delta must be within 5% of the larger result.
2247 // TODO(gc): Tighten this up by distinguishing between byte
2248 // arrays that are real and those that merely mark free space
2249 // on the heap.
2250 if (size_of_objects_1 > size_of_objects_2) {
2251 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2252 PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
2253 ", "
2254 "Iterator: %" V8PRIdPTR
2255 ", "
2256 "delta: %" V8PRIdPTR "\n",
2257 size_of_objects_1, size_of_objects_2, delta);
2258 CHECK_GT(size_of_objects_1 / 20, delta);
2259 } else {
2260 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2261 PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
2262 ", "
2263 "Iterator: %" V8PRIdPTR
2264 ", "
2265 "delta: %" V8PRIdPTR "\n",
2266 size_of_objects_1, size_of_objects_2, delta);
2267 CHECK_GT(size_of_objects_2 / 20, delta);
2268 }
2269}
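
// Worked example of the tolerance check above: CHECK_GT(size / 20, delta)
// encodes the 5% bound with integer arithmetic, since size / 20 is 5% of
// size rounded down. With size_of_objects_1 == 40 * MB, the iterator total
// may deviate by at most 2 * MB - 1 bytes before the check fails.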
2270
2271
2272static void FillUpNewSpace(NewSpace* new_space) {
2273 // Fill up new space to the point that it is completely full. Make sure
2274 // that the scavenger does not undo the filling.
2275 Heap* heap = new_space->heap();
2276 Isolate* isolate = heap->isolate();
2277 Factory* factory = isolate->factory();
2278 HandleScope scope(isolate);
2279 AlwaysAllocateScope always_allocate(isolate);
2280 intptr_t available = new_space->Capacity() - new_space->Size();
2281 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2282 for (intptr_t i = 0; i < number_of_fillers; i++) {
2283 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2284 }
2285}
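
// The sizing arithmetic above, factored into a standalone sketch
// (illustration only, using the same NewSpace/FixedArray interfaces):
// allocating this many 32-element arrays leaves strictly less than one
// array's worth of slack, so new space is effectively full afterwards.
static intptr_t NumberOfFillersFor(NewSpace* space) {
  intptr_t available = space->Capacity() - space->Size();
  return (available / FixedArray::SizeFor(32)) - 1;
}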
2286
2287
2288TEST(GrowAndShrinkNewSpace) {
2289 CcTest::InitializeVM();
2290 Heap* heap = CcTest::heap();
2291 NewSpace* new_space = heap->new_space();
2292
2293 if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2294 return;
2295 }
2296
2297 // Explicitly growing should double the space capacity.
2298 intptr_t old_capacity, new_capacity;
2299 old_capacity = new_space->TotalCapacity();
2300 new_space->Grow();
2301 new_capacity = new_space->TotalCapacity();
2302 CHECK(2 * old_capacity == new_capacity);
2303
2304 old_capacity = new_space->TotalCapacity();
2305 FillUpNewSpace(new_space);
2306 new_capacity = new_space->TotalCapacity();
2307 CHECK(old_capacity == new_capacity);
2308
2309 // Explicitly shrinking should not affect space capacity.
2310 old_capacity = new_space->TotalCapacity();
2311 new_space->Shrink();
2312 new_capacity = new_space->TotalCapacity();
2313 CHECK(old_capacity == new_capacity);
2314
2315 // Let the scavenger empty the new space.
2316 heap->CollectGarbage(NEW_SPACE);
2317 CHECK_LE(new_space->Size(), old_capacity);
2318
2319 // Explicitly shrinking should halve the space capacity.
2320 old_capacity = new_space->TotalCapacity();
2321 new_space->Shrink();
2322 new_capacity = new_space->TotalCapacity();
2323 CHECK(old_capacity == 2 * new_capacity);
2324
2325 // Consecutive shrinking should not affect space capacity.
2326 old_capacity = new_space->TotalCapacity();
2327 new_space->Shrink();
2328 new_space->Shrink();
2329 new_space->Shrink();
2330 new_capacity = new_space->TotalCapacity();
2331 CHECK(old_capacity == new_capacity);
2332}
2333
2334
2335TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2336 CcTest::InitializeVM();
2337 Heap* heap = CcTest::heap();
2338 if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2339 return;
2340 }
2341
2342 v8::HandleScope scope(CcTest::isolate());
2343 NewSpace* new_space = heap->new_space();
2344 intptr_t old_capacity, new_capacity;
2345 old_capacity = new_space->TotalCapacity();
2346 new_space->Grow();
2347 new_capacity = new_space->TotalCapacity();
2348 CHECK(2 * old_capacity == new_capacity);
2349 FillUpNewSpace(new_space);
2350 heap->CollectAllAvailableGarbage();
2351 new_capacity = new_space->TotalCapacity();
2352 CHECK(old_capacity == new_capacity);
2353}
2354
2355
2356static int NumberOfGlobalObjects() {
2357 int count = 0;
2358 HeapIterator iterator(CcTest::heap());
2359 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2360 if (obj->IsJSGlobalObject()) count++;
2361 }
2362 return count;
2363}
2364
2365
2366// Test that we don't embed maps from foreign contexts into
2367// optimized code.
2368TEST(LeakNativeContextViaMap) {
2369 i::FLAG_allow_natives_syntax = true;
2370 v8::Isolate* isolate = CcTest::isolate();
2371 v8::HandleScope outer_scope(isolate);
2372 v8::Persistent<v8::Context> ctx1p;
2373 v8::Persistent<v8::Context> ctx2p;
2374 {
2375 v8::HandleScope scope(isolate);
2376 ctx1p.Reset(isolate, v8::Context::New(isolate));
2377 ctx2p.Reset(isolate, v8::Context::New(isolate));
2378 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2379 }
2380
2381 CcTest::heap()->CollectAllAvailableGarbage();
2382 CHECK_EQ(2, NumberOfGlobalObjects());
2383
2384 {
2385 v8::HandleScope inner_scope(isolate);
2386 CompileRun("var v = {x: 42}");
2387 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2388 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2389 v8::Local<v8::Value> v =
2390 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2391 ctx2->Enter();
2392 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2393 v8::Local<v8::Value> res = CompileRun(
2394 "function f() { return o.x; }"
2395 "for (var i = 0; i < 10; ++i) f();"
2396 "%OptimizeFunctionOnNextCall(f);"
2397 "f();");
2398 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2399 CHECK(ctx2->Global()
2400 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2401 .FromJust());
2402 ctx2->Exit();
2403 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2404 ctx1p.Reset();
2405 isolate->ContextDisposedNotification();
2406 }
2407 CcTest::heap()->CollectAllAvailableGarbage();
2408 CHECK_EQ(1, NumberOfGlobalObjects());
2409 ctx2p.Reset();
2410 CcTest::heap()->CollectAllAvailableGarbage();
2411 CHECK_EQ(0, NumberOfGlobalObjects());
2412}
2413
2414
2415// Test that we don't embed functions from foreign contexts into
2416// optimized code.
2417TEST(LeakNativeContextViaFunction) {
2418 i::FLAG_allow_natives_syntax = true;
2419 v8::Isolate* isolate = CcTest::isolate();
2420 v8::HandleScope outer_scope(isolate);
2421 v8::Persistent<v8::Context> ctx1p;
2422 v8::Persistent<v8::Context> ctx2p;
2423 {
2424 v8::HandleScope scope(isolate);
2425 ctx1p.Reset(isolate, v8::Context::New(isolate));
2426 ctx2p.Reset(isolate, v8::Context::New(isolate));
2427 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2428 }
2429
2430 CcTest::heap()->CollectAllAvailableGarbage();
2431 CHECK_EQ(2, NumberOfGlobalObjects());
2432
2433 {
2434 v8::HandleScope inner_scope(isolate);
2435 CompileRun("var v = function() { return 42; }");
2436 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2437 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2438 v8::Local<v8::Value> v =
2439 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2440 ctx2->Enter();
2441 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2442 v8::Local<v8::Value> res = CompileRun(
2443 "function f(x) { return x(); }"
2444 "for (var i = 0; i < 10; ++i) f(o);"
2445 "%OptimizeFunctionOnNextCall(f);"
2446 "f(o);");
2447 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2448 CHECK(ctx2->Global()
2449 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2450 .FromJust());
2451 ctx2->Exit();
2452 ctx1->Exit();
2453 ctx1p.Reset();
2454 isolate->ContextDisposedNotification();
2455 }
2456 CcTest::heap()->CollectAllAvailableGarbage();
2457 CHECK_EQ(1, NumberOfGlobalObjects());
2458 ctx2p.Reset();
2459 CcTest::heap()->CollectAllAvailableGarbage();
2460 CHECK_EQ(0, NumberOfGlobalObjects());
2461}
2462
2463
2464TEST(LeakNativeContextViaMapKeyed) {
2465 i::FLAG_allow_natives_syntax = true;
2466 v8::Isolate* isolate = CcTest::isolate();
2467 v8::HandleScope outer_scope(isolate);
2468 v8::Persistent<v8::Context> ctx1p;
2469 v8::Persistent<v8::Context> ctx2p;
2470 {
2471 v8::HandleScope scope(isolate);
2472 ctx1p.Reset(isolate, v8::Context::New(isolate));
2473 ctx2p.Reset(isolate, v8::Context::New(isolate));
2474 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2475 }
2476
2477 CcTest::heap()->CollectAllAvailableGarbage();
2478 CHECK_EQ(2, NumberOfGlobalObjects());
2479
2480 {
2481 v8::HandleScope inner_scope(isolate);
2482 CompileRun("var v = [42, 43]");
2483 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2484 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2485 v8::Local<v8::Value> v =
2486 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2487 ctx2->Enter();
2488 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2489 v8::Local<v8::Value> res = CompileRun(
2490 "function f() { return o[0]; }"
2491 "for (var i = 0; i < 10; ++i) f();"
2492 "%OptimizeFunctionOnNextCall(f);"
2493 "f();");
2494 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2495 CHECK(ctx2->Global()
2496 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2497 .FromJust());
2498 ctx2->Exit();
2499 ctx1->Exit();
2500 ctx1p.Reset();
2501 isolate->ContextDisposedNotification();
2502 }
2503 CcTest::heap()->CollectAllAvailableGarbage();
2504 CHECK_EQ(1, NumberOfGlobalObjects());
2505 ctx2p.Reset();
2506 CcTest::heap()->CollectAllAvailableGarbage();
2507 CHECK_EQ(0, NumberOfGlobalObjects());
2508}
2509
2510
2511TEST(LeakNativeContextViaMapProto) {
2512 i::FLAG_allow_natives_syntax = true;
2513 v8::Isolate* isolate = CcTest::isolate();
2514 v8::HandleScope outer_scope(isolate);
2515 v8::Persistent<v8::Context> ctx1p;
2516 v8::Persistent<v8::Context> ctx2p;
2517 {
2518 v8::HandleScope scope(isolate);
2519 ctx1p.Reset(isolate, v8::Context::New(isolate));
2520 ctx2p.Reset(isolate, v8::Context::New(isolate));
2521 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2522 }
2523
2524 CcTest::heap()->CollectAllAvailableGarbage();
2525 CHECK_EQ(2, NumberOfGlobalObjects());
2526
2527 {
2528 v8::HandleScope inner_scope(isolate);
2529 CompileRun("var v = { y: 42}");
2530 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2531 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2532 v8::Local<v8::Value> v =
2533 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2534 ctx2->Enter();
2535 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2536 v8::Local<v8::Value> res = CompileRun(
2537 "function f() {"
2538 " var p = {x: 42};"
2539 " p.__proto__ = o;"
2540 " return p.x;"
2541 "}"
2542 "for (var i = 0; i < 10; ++i) f();"
2543 "%OptimizeFunctionOnNextCall(f);"
2544 "f();");
2545 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2546 CHECK(ctx2->Global()
2547 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2548 .FromJust());
2549 ctx2->Exit();
2550 ctx1->Exit();
2551 ctx1p.Reset();
2552 isolate->ContextDisposedNotification();
2553 }
2554 CcTest::heap()->CollectAllAvailableGarbage();
2555 CHECK_EQ(1, NumberOfGlobalObjects());
2556 ctx2p.Reset();
2557 CcTest::heap()->CollectAllAvailableGarbage();
2558 CHECK_EQ(0, NumberOfGlobalObjects());
2559}
2560
2561
2562TEST(InstanceOfStubWriteBarrier) {
2563 i::FLAG_allow_natives_syntax = true;
2564#ifdef VERIFY_HEAP
2565 i::FLAG_verify_heap = true;
2566#endif
2567
2568 CcTest::InitializeVM();
2569 if (!CcTest::i_isolate()->use_crankshaft()) return;
2570 if (i::FLAG_force_marking_deque_overflows) return;
2571 v8::HandleScope outer_scope(CcTest::isolate());
2572 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2573
2574 {
2575 v8::HandleScope scope(CcTest::isolate());
2576 CompileRun(
2577 "function foo () { }"
2578 "function mkbar () { return new (new Function(\"\")) (); }"
2579 "function f (x) { return (x instanceof foo); }"
2580 "function g () { f(mkbar()); }"
2581 "f(new foo()); f(new foo());"
2582 "%OptimizeFunctionOnNextCall(f);"
2583 "f(new foo()); g();");
2584 }
2585
2586 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2587 marking->Stop();
2588 CcTest::heap()->StartIncrementalMarking();
2589
2590 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2591 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2592 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2593
2594 CHECK(f->IsOptimized());
2595
2596 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2597 !marking->IsStopped()) {
2598 // Discard any pending GC requests; otherwise we will get a GC when we
2599 // enter the code below.
2600 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2601 }
2602
2603 CHECK(marking->IsMarking());
2604
2605 {
2606 v8::HandleScope scope(CcTest::isolate());
2607 v8::Local<v8::Object> global = CcTest::global();
2608 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
2609 global->Get(ctx, v8_str("g")).ToLocalChecked());
2610 g->Call(ctx, global, 0, nullptr).ToLocalChecked();
2611 }
2612
2613 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2614 CcTest::heap()->CollectGarbage(OLD_SPACE);
2615}
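
// A sketch of the stepping idiom used above, assuming the same
// IncrementalMarking interface: drive marking in fixed-size steps until the
// given code object has been marked black or marking stops. Illustration
// only; the test inlines this loop directly.
static void StepMarkingUntilBlack(IncrementalMarking* marking, Code* code) {
  while (!Marking::IsBlack(Marking::MarkBitFrom(code)) &&
         !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
}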
2616
2617namespace {
2618
2619int GetProfilerTicks(SharedFunctionInfo* shared) {
2620 return FLAG_ignition ? shared->profiler_ticks()
2621 : shared->code()->profiler_ticks();
2622}
2623
2624} // namespace
2625
2626TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2627 i::FLAG_stress_compaction = false;
2628 i::FLAG_allow_natives_syntax = true;
2629#ifdef VERIFY_HEAP
2630 i::FLAG_verify_heap = true;
2631#endif
2632
2633 CcTest::InitializeVM();
2634 if (!CcTest::i_isolate()->use_crankshaft()) return;
2635 v8::HandleScope outer_scope(CcTest::isolate());
2636 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2637
2638 {
2639 v8::HandleScope scope(CcTest::isolate());
2640 CompileRun(
2641 "function f () {"
2642 " var s = 0;"
2643 " for (var i = 0; i < 100; i++) s += i;"
2644 " return s;"
2645 "}"
2646 "f(); f();"
2647 "%OptimizeFunctionOnNextCall(f);"
2648 "f();");
2649 }
2650 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2651 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2652 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2653 CHECK(f->IsOptimized());
2654
2655 // Make sure incremental marking is not running.
2656 CcTest::heap()->incremental_marking()->Stop();
2657
2658 CcTest::heap()->StartIncrementalMarking();
2659 // The following calls will increment CcTest::heap()->global_ic_age().
2660 CcTest::isolate()->ContextDisposedNotification();
2661 SimulateIncrementalMarking(CcTest::heap());
2662 CcTest::heap()->CollectAllGarbage();
2663
2664 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2665 CHECK_EQ(0, f->shared()->opt_count());
2666 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2667}
2668
2669
2670TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2671 i::FLAG_stress_compaction = false;
2672 i::FLAG_allow_natives_syntax = true;
2673#ifdef VERIFY_HEAP
2674 i::FLAG_verify_heap = true;
2675#endif
2676
2677 CcTest::InitializeVM();
2678 if (!CcTest::i_isolate()->use_crankshaft()) return;
2679 v8::HandleScope outer_scope(CcTest::isolate());
2680 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2681
2682 {
2683 v8::HandleScope scope(CcTest::isolate());
2684 CompileRun(
2685 "function f () {"
2686 " var s = 0;"
2687 " for (var i = 0; i < 100; i++) s += i;"
2688 " return s;"
2689 "}"
2690 "f(); f();"
2691 "%OptimizeFunctionOnNextCall(f);"
2692 "f();");
2693 }
2694 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2695 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2696 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2697 CHECK(f->IsOptimized());
2698
2699 // Make sure incremental marking is not running.
2700 CcTest::heap()->incremental_marking()->Stop();
2701
2702 // The following two calls will increment CcTest::heap()->global_ic_age().
2703 CcTest::isolate()->ContextDisposedNotification();
2704 CcTest::heap()->CollectAllGarbage();
2705
2706 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2707 CHECK_EQ(0, f->shared()->opt_count());
2708 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2709}
2710
2711
2712HEAP_TEST(GCFlags) {
2713 CcTest::InitializeVM();
2714 Heap* heap = CcTest::heap();
2715
2716 heap->set_current_gc_flags(Heap::kNoGCFlags);
2717 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2718
2719 // Set the flags to check whether we appropriately reset them after the GC.
2720 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2721 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2722 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2723
2724 MarkCompactCollector* collector = heap->mark_compact_collector();
2725 if (collector->sweeping_in_progress()) {
2726 collector->EnsureSweepingCompleted();
2727 }
2728
2729 IncrementalMarking* marking = heap->incremental_marking();
2730 marking->Stop();
2731 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
2732 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2733
2734 heap->CollectGarbage(NEW_SPACE);
2735 // NewSpace scavenges should not overwrite the flags.
2736 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2737
2738 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2739 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2740}
2741
2742
2743TEST(IdleNotificationFinishMarking) {
2744 i::FLAG_allow_natives_syntax = true;
2745 CcTest::InitializeVM();
2746 SimulateFullSpace(CcTest::heap()->old_space());
2747 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2748 marking->Stop();
2749 CcTest::heap()->StartIncrementalMarking();
2750
2751 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2752
2753 // TODO(hpayer): We cannot write a proper unit test for the heap right now.
2754 // The ideal test would use kMaxIdleMarkingDelayCounter to test the
2755 // marking delay counter.
2756
2757 // Perform a huge incremental marking step but don't complete marking.
2758 intptr_t bytes_processed = 0;
2759 do {
2760 bytes_processed =
2761 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2762 IncrementalMarking::FORCE_MARKING,
2763 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2764 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2765 } while (bytes_processed);
2766
2767 // The next invocations of incremental marking are not going to complete
2768 // marking since the completion threshold is not reached.
2770 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2771 i++) {
2772 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2773 IncrementalMarking::FORCE_MARKING,
2774 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2775 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2776 }
2777
2778 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2779
2780 // The next idle notification has to finish incremental marking.
2781 const double kLongIdleTime = 1000.0;
2782 CcTest::isolate()->IdleNotificationDeadline(
2783 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2784 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2785 kLongIdleTime);
2786 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2787}
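
// The deadline argument above is "now + kLongIdleTime", expressed in
// seconds. A small sketch of the conversion, using the same v8::base time
// API as the test (illustration only):
static double IdleDeadlineInSeconds(double idle_time_in_s) {
  double now_in_s =
      v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
      static_cast<double>(v8::base::Time::kMicrosecondsPerSecond);
  return now_in_s + idle_time_in_s;
}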
2788
2789
2790// Test that HAllocateObject will always return an object in new-space.
2791TEST(OptimizedAllocationAlwaysInNewSpace) {
2792 i::FLAG_allow_natives_syntax = true;
2793 CcTest::InitializeVM();
2794 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2795 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2796 v8::HandleScope scope(CcTest::isolate());
2797 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2798 SimulateFullSpace(CcTest::heap()->new_space());
2799 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2800 v8::Local<v8::Value> res = CompileRun(
2801 "function c(x) {"
2802 " this.x = x;"
2803 " for (var i = 0; i < 32; i++) {"
2804 " this['x' + i] = x;"
2805 " }"
2806 "}"
2807 "function f(x) { return new c(x); };"
2808 "f(1); f(2); f(3);"
2809 "%OptimizeFunctionOnNextCall(f);"
2810 "f(4);");
2811
2812 CHECK_EQ(4, res.As<v8::Object>()
2813 ->GetRealNamedProperty(ctx, v8_str("x"))
2814 .ToLocalChecked()
2815 ->Int32Value(ctx)
2816 .FromJust());
2817
2818 i::Handle<JSReceiver> o =
2819 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2820
2821 CHECK(CcTest::heap()->InNewSpace(*o));
2822}
2823
2824
2825TEST(OptimizedPretenuringAllocationFolding) {
2826 i::FLAG_allow_natives_syntax = true;
2827 i::FLAG_expose_gc = true;
2828 CcTest::InitializeVM();
2829 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2830 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2831 v8::HandleScope scope(CcTest::isolate());
2832 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2833 // Grow new space until maximum capacity is reached.
2834 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2835 CcTest::heap()->new_space()->Grow();
2836 }
2837
2838 i::ScopedVector<char> source(1024);
2839 i::SNPrintF(
2840 source,
2841 "var number_elements = %d;"
2842 "var elements = new Array();"
2843 "function f() {"
2844 " for (var i = 0; i < number_elements; i++) {"
2845 " elements[i] = [[{}], [1.1]];"
2846 " }"
2847 " return elements[number_elements-1]"
2848 "};"
2849 "f(); gc();"
2850 "f(); f();"
2851 "%%OptimizeFunctionOnNextCall(f);"
2852 "f();",
2853 AllocationSite::kPretenureMinimumCreated);
2854
2855 v8::Local<v8::Value> res = CompileRun(source.start());
2856
2857 v8::Local<v8::Value> int_array =
2858 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2859 i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2860 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2861 v8::Local<v8::Value> double_array =
2862 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2863 i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2864 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2865
2866 i::Handle<JSReceiver> o =
2867 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2868 CHECK(CcTest::heap()->InOldSpace(*o));
2869 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2870 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2871 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2872 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2873}
2874
2875
2876TEST(OptimizedPretenuringObjectArrayLiterals) {
2877 i::FLAG_allow_natives_syntax = true;
2878 i::FLAG_expose_gc = true;
2879 CcTest::InitializeVM();
2880 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2881 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2882 v8::HandleScope scope(CcTest::isolate());
2883
2884 // Grow new space until maximum capacity is reached.
2885 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2886 CcTest::heap()->new_space()->Grow();
2887 }
2888
2889 i::ScopedVector<char> source(1024);
2890 i::SNPrintF(
2891 source,
2892 "var number_elements = %d;"
2893 "var elements = new Array(number_elements);"
2894 "function f() {"
2895 " for (var i = 0; i < number_elements; i++) {"
2896 " elements[i] = [{}, {}, {}];"
2897 " }"
2898 " return elements[number_elements - 1];"
2899 "};"
2900 "f(); gc();"
2901 "f(); f();"
2902 "%%OptimizeFunctionOnNextCall(f);"
2903 "f();",
2904 AllocationSite::kPretenureMinimumCreated);
2905
2906 v8::Local<v8::Value> res = CompileRun(source.start());
2907
2908 i::Handle<JSObject> o = Handle<JSObject>::cast(
2909 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2910
2911 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2912 CHECK(CcTest::heap()->InOldSpace(*o));
2913}
2914
2915
2916TEST(OptimizedPretenuringMixedInObjectProperties) {
2917 i::FLAG_allow_natives_syntax = true;
2918 i::FLAG_expose_gc = true;
2919 CcTest::InitializeVM();
2920 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2921 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2922 v8::HandleScope scope(CcTest::isolate());
2923
2924 // Grow new space until maximum capacity is reached.
2925 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2926 CcTest::heap()->new_space()->Grow();
2927 }
2928
2930 i::ScopedVector<char> source(1024);
2931 i::SNPrintF(
2932 source,
2933 "var number_elements = %d;"
2934 "var elements = new Array(number_elements);"
2935 "function f() {"
2936 " for (var i = 0; i < number_elements; i++) {"
2937 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2938 " }"
2939 " return elements[number_elements - 1];"
2940 "};"
2941 "f(); gc();"
2942 "f(); f();"
2943 "%%OptimizeFunctionOnNextCall(f);"
2944 "f();",
2945 AllocationSite::kPretenureMinimumCreated);
2946
2947 v8::Local<v8::Value> res = CompileRun(source.start());
2948
2949 i::Handle<JSObject> o = Handle<JSObject>::cast(
2950 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2951
2952 CHECK(CcTest::heap()->InOldSpace(*o));
2953 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2954 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2955 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
2956 if (!o->IsUnboxedDoubleField(idx2)) {
2957 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2958 } else {
2959 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2960 }
2961
2962 JSObject* inner_object =
2963 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2964 CHECK(CcTest::heap()->InOldSpace(inner_object));
2965 if (!inner_object->IsUnboxedDoubleField(idx1)) {
2966 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
2967 } else {
2968 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2969 }
2970 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
2971}
2972
2973
2974TEST(OptimizedPretenuringDoubleArrayProperties) {
2975 i::FLAG_allow_natives_syntax = true;
2976 i::FLAG_expose_gc = true;
2977 CcTest::InitializeVM();
2978 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2979 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2980 v8::HandleScope scope(CcTest::isolate());
2981
2982 // Grow new space until maximum capacity is reached.
2983 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2984 CcTest::heap()->new_space()->Grow();
2985 }
2986
2987 i::ScopedVector<char> source(1024);
2988 i::SNPrintF(
2989 source,
2990 "var number_elements = %d;"
2991 "var elements = new Array(number_elements);"
2992 "function f() {"
2993 " for (var i = 0; i < number_elements; i++) {"
2994 " elements[i] = {a: 1.1, b: 2.2};"
2995 " }"
2996 " return elements[i - 1];"
2997 "};"
2998 "f(); gc();"
2999 "f(); f();"
3000 "%%OptimizeFunctionOnNextCall(f);"
3001 "f();",
3002 AllocationSite::kPretenureMinimumCreated);
3003
3004 v8::Local<v8::Value> res = CompileRun(source.start());
3005
3006 i::Handle<JSObject> o = Handle<JSObject>::cast(
3007 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3008
3009 CHECK(CcTest::heap()->InOldSpace(*o));
3010 CHECK(CcTest::heap()->InOldSpace(o->properties()));
3011}
3012
3013
3014TEST(OptimizedPretenuringDoubleArrayLiterals) {
3015 i::FLAG_allow_natives_syntax = true;
3016 i::FLAG_expose_gc = true;
3017 CcTest::InitializeVM();
3018 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3019 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3020 v8::HandleScope scope(CcTest::isolate());
3021
3022 // Grow new space until maximum capacity is reached.
3023 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3024 CcTest::heap()->new_space()->Grow();
3025 }
3026
3027 i::ScopedVector<char> source(1024);
3028 i::SNPrintF(
3029 source,
3030 "var number_elements = %d;"
3031 "var elements = new Array(number_elements);"
3032 "function f() {"
3033 " for (var i = 0; i < number_elements; i++) {"
3034 " elements[i] = [1.1, 2.2, 3.3];"
3035 " }"
3036 " return elements[number_elements - 1];"
3037 "};"
3038 "f(); gc();"
3039 "f(); f();"
3040 "%%OptimizeFunctionOnNextCall(f);"
3041 "f();",
3042 AllocationSite::kPretenureMinimumCreated);
3043
3044 v8::Local<v8::Value> res = CompileRun(source.start());
3045
3046 i::Handle<JSObject> o = Handle<JSObject>::cast(
3047 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3048
3049 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3050 CHECK(CcTest::heap()->InOldSpace(*o));
3051}
3052
3053
3054TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
3055 i::FLAG_allow_natives_syntax = true;
3056 i::FLAG_expose_gc = true;
3057 CcTest::InitializeVM();
3058 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3059 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3060 v8::HandleScope scope(CcTest::isolate());
3061 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3062 // Grow new space until maximum capacity is reached.
3063 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3064 CcTest::heap()->new_space()->Grow();
3065 }
3066
3067 i::ScopedVector<char> source(1024);
3068 i::SNPrintF(
3069 source,
3070 "var number_elements = 100;"
3071 "var elements = new Array(number_elements);"
3072 "function f() {"
3073 " for (var i = 0; i < number_elements; i++) {"
3074 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
3075 " }"
3076 " return elements[number_elements - 1];"
3077 "};"
3078 "f(); gc();"
3079 "f(); f();"
3080 "%%OptimizeFunctionOnNextCall(f);"
3081 "f();");
3082
3083 v8::Local<v8::Value> res = CompileRun(source.start());
3084
3085 v8::Local<v8::Value> int_array =
3086 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
3087 i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
3088 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
3089 v8::Local<v8::Value> double_array =
3090 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
3091 i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
3092 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
3093
3094 Handle<JSObject> o = Handle<JSObject>::cast(
3095 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3096 CHECK(CcTest::heap()->InOldSpace(*o));
3097 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
3098 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
3099 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
3100 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
3101}
3102
3103
3104TEST(OptimizedPretenuringNestedObjectLiterals) {
3105 i::FLAG_allow_natives_syntax = true;
3106 i::FLAG_expose_gc = true;
3107 CcTest::InitializeVM();
3108 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3109 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3110 v8::HandleScope scope(CcTest::isolate());
3111 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3112 // Grow new space until maximum capacity is reached.
3113 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3114 CcTest::heap()->new_space()->Grow();
3115 }
3116
3117 i::ScopedVector<char> source(1024);
3118 i::SNPrintF(
3119 source,
3120 "var number_elements = %d;"
3121 "var elements = new Array(number_elements);"
3122 "function f() {"
3123 " for (var i = 0; i < number_elements; i++) {"
3124 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3125 " }"
3126 " return elements[number_elements - 1];"
3127 "};"
3128 "f(); gc();"
3129 "f(); f();"
3130 "%%OptimizeFunctionOnNextCall(f);"
3131 "f();",
3132 AllocationSite::kPretenureMinimumCreated);
3133
3134 v8::Local<v8::Value> res = CompileRun(source.start());
3135
3136 v8::Local<v8::Value> int_array_1 =
3137 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
3138 Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
3139 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
3140 v8::Local<v8::Value> int_array_2 =
3141 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
3142 Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
3143 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
3144
3145 Handle<JSObject> o = Handle<JSObject>::cast(
3146 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3147 CHECK(CcTest::heap()->InOldSpace(*o));
3148 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3149 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3150 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3151 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
3152}
3153
3154
3155TEST(OptimizedPretenuringNestedDoubleLiterals) {
3156 i::FLAG_allow_natives_syntax = true;
3157 i::FLAG_expose_gc = true;
3158 CcTest::InitializeVM();
3159 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3160 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3161 v8::HandleScope scope(CcTest::isolate());
3162 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3163 // Grow new space until maximum capacity is reached.
3164 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3165 CcTest::heap()->new_space()->Grow();
3166 }
3167
3168 i::ScopedVector<char> source(1024);
3169 i::SNPrintF(
3170 source,
3171 "var number_elements = %d;"
3172 "var elements = new Array(number_elements);"
3173 "function f() {"
3174 " for (var i = 0; i < number_elements; i++) {"
3175 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3176 " }"
3177 " return elements[number_elements - 1];"
3178 "};"
3179 "f(); gc();"
3180 "f(); f();"
3181 "%%OptimizeFunctionOnNextCall(f);"
3182 "f();",
3183 AllocationSite::kPretenureMinimumCreated);
3184
3185 v8::Local<v8::Value> res = CompileRun(source.start());
3186
3187 v8::Local<v8::Value> double_array_1 =
3188 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
3189 i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
3190 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
3191 v8::Local<v8::Value> double_array_2 =
3192 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
3193 i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
3194 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
3195
3196 i::Handle<JSObject> o = Handle<JSObject>::cast(
3197 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3198 CHECK(CcTest::heap()->InOldSpace(*o));
3199 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3200 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3201 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3202 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
3203}
3204
3205
3206 // Test allocation of regular array literals.
3207TEST(OptimizedAllocationArrayLiterals) {
3208 i::FLAG_allow_natives_syntax = true;
3209 CcTest::InitializeVM();
3210 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3211 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3212 v8::HandleScope scope(CcTest::isolate());
3213 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3214 v8::Local<v8::Value> res = CompileRun(
3215 "function f() {"
3216 " var numbers = new Array(1, 2, 3);"
3217 " numbers[0] = 3.14;"
3218 " return numbers;"
3219 "};"
3220 "f(); f(); f();"
3221 "%OptimizeFunctionOnNextCall(f);"
3222 "f();");
3223 CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
3224 ->Get(ctx, v8_str("0"))
3225 .ToLocalChecked()
3226 ->Int32Value(ctx)
3227 .FromJust());
3228
3229 i::Handle<JSObject> o = Handle<JSObject>::cast(
3230 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3231
3232 CHECK(CcTest::heap()->InNewSpace(o->elements()));
3233}
3234
3235
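// Returns the number of transitions recorded in |map|'s transition array.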
3236static int CountMapTransitions(Map* map) {
3237 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3238}
3239
3240
3241// Test that map transitions are cleared and maps are collected with
3242// incremental marking as well.
3243TEST(Regress1465) {
3244 i::FLAG_stress_compaction = false;
3245 i::FLAG_allow_natives_syntax = true;
3246 i::FLAG_trace_incremental_marking = true;
3247 i::FLAG_retain_maps_for_n_gc = 0;
3248 CcTest::InitializeVM();
3249 v8::HandleScope scope(CcTest::isolate());
3250 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3251 static const int transitions_count = 256;
3252
3253 CompileRun("function F() {}");
3254 {
3255 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3256 for (int i = 0; i < transitions_count; i++) {
3257 EmbeddedVector<char, 64> buffer;
3258 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3259 CompileRun(buffer.start());
3260 }
3261 CompileRun("var root = new F;");
3262 }
3263
3264 i::Handle<JSReceiver> root =
3265 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3266 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
3267
3268 // Count number of live transitions before marking.
3269 int transitions_before = CountMapTransitions(root->map());
3270 CompileRun("%DebugPrint(root);");
3271 CHECK_EQ(transitions_count, transitions_before);
3272
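// Simulate incremental marking, then finish with a full GC. With
// FLAG_retain_maps_for_n_gc == 0, transition targets without live instances
// should be collected.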
3273 SimulateIncrementalMarking(CcTest::heap());
3274 CcTest::heap()->CollectAllGarbage();
3275
3276 // Count number of live transitions after marking. Note that one transition
3277 // is left, because 'o' still holds an instance of one transition target.
3278 int transitions_after = CountMapTransitions(root->map());
3279 CompileRun("%DebugPrint(root);");
3280 CHECK_EQ(1, transitions_after);
3281}
3282
3283
3284#ifdef DEBUG
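// Creates |transitions_count| transitions on F's map by adding a distinct
// property to a fresh instance each time.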
3285static void AddTransitions(int transitions_count) {
3286 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3287 for (int i = 0; i < transitions_count; i++) {
3288 EmbeddedVector<char, 64> buffer;
3289 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3290 CompileRun(buffer.start());
3291 }
3292}
3293
3294
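// Looks up a global property by name and returns it as a JSObject handle.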
3295static i::Handle<JSObject> GetByName(const char* name) {
3296 return i::Handle<JSObject>::cast(
3297 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3298 CcTest::global()
3299 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
3300 .ToLocalChecked())));
3301}
3302
3303
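// Adds a Smi-valued property to |object| while configuring the heap so that
// a global GC is forced after |gc_count| allocations.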
3304static void AddPropertyTo(
3305 int gc_count, Handle<JSObject> object, const char* property_name) {
3306 Isolate* isolate = CcTest::i_isolate();
3307 Factory* factory = isolate->factory();
3308 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3309 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3310 i::FLAG_gc_interval = gc_count;
3311 i::FLAG_gc_global = true;
3312 i::FLAG_retain_maps_for_n_gc = 0;
3313 CcTest::heap()->set_allocation_timeout(gc_count);
3314 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
3315}
3316
3317
3318TEST(TransitionArrayShrinksDuringAllocToZero) {
3319 i::FLAG_stress_compaction = false;
3320 i::FLAG_allow_natives_syntax = true;
3321 CcTest::InitializeVM();
3322 v8::HandleScope scope(CcTest::isolate());
3323 static const int transitions_count = 10;
3324 CompileRun("function F() { }");
3325 AddTransitions(transitions_count);
3326 CompileRun("var root = new F;");
3327 Handle<JSObject> root = GetByName("root");
3328
3329 // Count number of live transitions before marking.
3330 int transitions_before = CountMapTransitions(root->map());
3331 CHECK_EQ(transitions_count, transitions_before);
3332
3333 // Get rid of o
3334 CompileRun("o = new F;"
3335 "root = new F");
3336 root = GetByName("root");
3337 AddPropertyTo(2, root, "funny");
3338 CcTest::heap()->CollectGarbage(NEW_SPACE);
3339
3340 // Count number of live transitions after marking. Note that one transition
3341 // is left: 'root' still holds an instance of the 'funny' transition target.
3342 int transitions_after = CountMapTransitions(
3343 Map::cast(root->map()->GetBackPointer()));
3344 CHECK_EQ(1, transitions_after);
3345}
3346
3347
3348TEST(TransitionArrayShrinksDuringAllocToOne) {
3349 i::FLAG_stress_compaction = false;
3350 i::FLAG_allow_natives_syntax = true;
3351 CcTest::InitializeVM();
3352 v8::HandleScope scope(CcTest::isolate());
3353 static const int transitions_count = 10;
3354 CompileRun("function F() {}");
3355 AddTransitions(transitions_count);
3356 CompileRun("var root = new F;");
3357 Handle<JSObject> root = GetByName("root");
3358
3359 // Count number of live transitions before marking.
3360 int transitions_before = CountMapTransitions(root->map());
3361 CHECK_EQ(transitions_count, transitions_before);
3362
3363 root = GetByName("root");
3364 AddPropertyTo(2, root, "funny");
3365 CcTest::heap()->CollectGarbage(NEW_SPACE);
3366
3367 // Count number of live transitions after marking. Two transitions remain:
3368 // 'o' still holds one transition target and 'root' holds the 'funny' target.
3369 int transitions_after = CountMapTransitions(
3370 Map::cast(root->map()->GetBackPointer()));
3371 CHECK_EQ(2, transitions_after);
3372}
3373
3374
3375TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3376 i::FLAG_stress_compaction = false;
3377 i::FLAG_allow_natives_syntax = true;
3378 CcTest::InitializeVM();
3379 v8::HandleScope scope(CcTest::isolate());
3380 static const int transitions_count = 10;
3381 CompileRun("function F() {}");
3382 AddTransitions(transitions_count);
3383 CompileRun("var root = new F;");
3384 Handle<JSObject> root = GetByName("root");
3385
3386 // Count number of live transitions before marking.
3387 int transitions_before = CountMapTransitions(root->map());
3388 CHECK_EQ(transitions_count, transitions_before);
3389
3390 root = GetByName("root");
3391 AddPropertyTo(0, root, "prop9");
3392 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3393
3394 // Count number of live transitions after marking. Note that one transition
3395 // is left, because 'o' still holds an instance of one transition target.
3396 int transitions_after = CountMapTransitions(
3397 Map::cast(root->map()->GetBackPointer()));
3398 CHECK_EQ(1, transitions_after);
3399}
3400
3401
3402TEST(TransitionArraySimpleToFull) {
3403 i::FLAG_stress_compaction = false;
3404 i::FLAG_allow_natives_syntax = true;
3405 CcTest::InitializeVM();
3406 v8::HandleScope scope(CcTest::isolate());
3407 static const int transitions_count = 1;
3408 CompileRun("function F() {}");
3409 AddTransitions(transitions_count);
3410 CompileRun("var root = new F;");
3411 Handle<JSObject> root = GetByName("root");
3412
3413 // Count number of live transitions before marking.
3414 int transitions_before = CountMapTransitions(root->map());
3415 CHECK_EQ(transitions_count, transitions_before);
3416
3417 CompileRun("o = new F;"
3418 "root = new F");
3419 root = GetByName("root");
3420 CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3421 AddPropertyTo(2, root, "happy");
3422
3423 // Count number of live transitions after marking. Note that one transition
3424 // is left: 'root' holds an instance of the 'happy' transition target.
3425 int transitions_after = CountMapTransitions(
3426 Map::cast(root->map()->GetBackPointer()));
3427 CHECK_EQ(1, transitions_after);
3428}
3429#endif // DEBUG
3430
3431
3432TEST(Regress2143a) {
3433 i::FLAG_incremental_marking = true;
3434 CcTest::InitializeVM();
3435 v8::HandleScope scope(CcTest::isolate());
3436
3437 // Prepare a map transition from the root object together with a yet
3438 // untransitioned root object.
3439 CompileRun("var root = new Object;"
3440 "root.foo = 0;"
3441 "root = new Object;");
3442
3443 SimulateIncrementalMarking(CcTest::heap());
3444
3445 // Compile a StoreIC that performs the prepared map transition. This
3446 // will restart incremental marking and should make sure the root is
3447 // marked grey again.
3448 CompileRun("function f(o) {"
3449 " o.foo = 0;"
3450 "}"
3451 "f(new Object);"
3452 "f(root);");
3453
3454 // This bug only triggers with aggressive IC clearing.
3455 CcTest::heap()->AgeInlineCaches();
3456
3457 // Explicitly request GC to perform final marking step and sweeping.
3458 CcTest::heap()->CollectAllGarbage();
3459
3460 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3461 CcTest::global()
3462 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3463 .ToLocalChecked()));
3464
3465 // The root object should be in a sane state.
3466 CHECK(root->IsJSObject());
3467 CHECK(root->map()->IsMap());
3468}
3469
3470
3471TEST(Regress2143b) {
3472 i::FLAG_incremental_marking = true;
3473 i::FLAG_allow_natives_syntax = true;
3474 CcTest::InitializeVM();
3475 v8::HandleScope scope(CcTest::isolate());
3476
3477 // Prepare a map transition from the root object together with a yet
3478 // untransitioned root object.
3479 CompileRun("var root = new Object;"
3480 "root.foo = 0;"
3481 "root = new Object;");
3482
3483 SimulateIncrementalMarking(CcTest::heap());
3484
3485 // Compile an optimized LStoreNamedField that performs the prepared
3486 // map transition. This will restart incremental marking and should
3487 // make sure the root is marked grey again.
3488 CompileRun("function f(o) {"
3489 " o.foo = 0;"
3490 "}"
3491 "f(new Object);"
3492 "f(new Object);"
3493 "%OptimizeFunctionOnNextCall(f);"
3494 "f(root);"
3495 "%DeoptimizeFunction(f);");
3496
3497 // This bug only triggers with aggressive IC clearing.
3498 CcTest::heap()->AgeInlineCaches();
3499
3500 // Explicitly request GC to perform final marking step and sweeping.
3501 CcTest::heap()->CollectAllGarbage();
3502
3503 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3504 CcTest::global()
3505 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3506 .ToLocalChecked()));
3507
3508 // The root object should be in a sane state.
3509 CHECK(root->IsJSObject());
3510 CHECK(root->map()->IsMap());
3511}
3512
3513
3514TEST(ReleaseOverReservedPages) {
3515 if (FLAG_never_compact) return;
3516 i::FLAG_trace_gc = true;
3517 // The optimizer can allocate stuff, messing up the test.
3518 i::FLAG_crankshaft = false;
3519 i::FLAG_always_opt = false;
3520 // Parallel compaction increases fragmentation, depending on how existing
3521 // memory is distributed. Since this is non-deterministic because of
3522 // concurrent sweeping, we disable it for this test.
3523 i::FLAG_parallel_compaction = false;
3524 // Concurrent sweeping adds non determinism, depending on when memory is
3525 // available for further reuse.
3526 i::FLAG_concurrent_sweeping = false;
3527 // Fast evacuation of pages may result in a different page count in old space.
3528 i::FLAG_page_promotion = false;
3529 CcTest::InitializeVM();
3530 Isolate* isolate = CcTest::i_isolate();
3531 Factory* factory = isolate->factory();
3532 Heap* heap = isolate->heap();
3533 v8::HandleScope scope(CcTest::isolate());
3534 static const int number_of_test_pages = 20;
3535
3536 // Prepare many pages with low live-bytes count.
3537 PagedSpace* old_space = heap->old_space();
3538 CHECK_EQ(1, old_space->CountTotalPages());
3539 for (int i = 0; i < number_of_test_pages; i++) {
3540 AlwaysAllocateScope always_allocate(isolate);
3541 SimulateFullSpace(old_space);
3542 factory->NewFixedArray(1, TENURED);
3543 }
3544 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3545
3546 // Triggering one GC will cause a lot of garbage to be discovered but
3547 // evenly spread across all allocated pages.
3548 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3549 "triggered for preparation");
3550 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3551
3552 // Triggering subsequent GCs should cause at least half of the pages
3553 // to be released to the OS after at most two cycles.
3554 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3555 "triggered by test 1");
3556 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3557 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3558 "triggered by test 2");
3559 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3560
3561 // Triggering a last-resort GC should cause all pages to be released to the
3562 // OS so that other processes can seize the memory. If we get a failure here
3563 // where there are 2 pages left instead of 1, then we should increase the
3564 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3565 // first page should be small in order to reduce memory used when the VM
3566 // boots, but if the 20 small arrays don't fit on the first page then that's
3567 // an indication that it is too small.
3568 heap->CollectAllAvailableGarbage("triggered really hard");
3569 CHECK_EQ(1, old_space->CountTotalPages());
3570}
3571
3572static int forced_gc_counter = 0;
3573
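// Use-counter callback that counts how often a forced GC is reported.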
3574void MockUseCounterCallback(v8::Isolate* isolate,
3575 v8::Isolate::UseCounterFeature feature) {
3576 isolate->GetCurrentContext();
3577 if (feature == v8::Isolate::kForcedGC) {
3578 forced_gc_counter++;
3579 }
3580}
3581
3582
3583TEST(CountForcedGC) {
3584 i::FLAG_expose_gc = true;
3585 CcTest::InitializeVM();
3586 Isolate* isolate = CcTest::i_isolate();
3587 v8::HandleScope scope(CcTest::isolate());
3588
3589 isolate->SetUseCounterCallback(MockUseCounterCallback);
3590
3591 forced_gc_counter = 0;
3592 const char* source = "gc();";
3593 CompileRun(source);
3594 CHECK_GT(forced_gc_counter, 0);
3595}
3596
3597
3598#ifdef OBJECT_PRINT
3599TEST(PrintSharedFunctionInfo) {
3600 CcTest::InitializeVM();
3601 v8::HandleScope scope(CcTest::isolate());
3602 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3603 const char* source = "f = function() { return 987654321; }\n"
3604 "g = function() { return 123456789; }\n";
3605 CompileRun(source);
3606 i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3607 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3608 CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3609
3610 OFStream os(stdout);
3611 g->shared()->Print(os);
3612 os << std::endl;
3613}
3614#endif // OBJECT_PRINT
3615
3616
3617TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3618 if (i::FLAG_always_opt) return;
3619 CcTest::InitializeVM();
3620 v8::HandleScope scope(CcTest::isolate());
3621 v8::Local<v8::Value> fun1, fun2;
3622 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3623 {
3624 CompileRun("function fun() {};");
3625 fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3626 }
3627
3628 {
3629 CompileRun("function fun() {};");
3630 fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3631 }
3632
3633 // Prepare function f that contains type feedback for the two closures.
3634 CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3635 CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3636 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3637
3638 Handle<JSFunction> f = Handle<JSFunction>::cast(
3639 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3640 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3641
3642 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3643 FeedbackVectorHelper feedback_helper(feedback_vector);
3644
3645 int expected_slots = 2;
3646 CHECK_EQ(expected_slots, feedback_helper.slot_count());
3647 int slot1 = 0;
3648 int slot2 = 1;
3649 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
3650 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
3651
3652 SimulateIncrementalMarking(CcTest::heap());
3653 CcTest::heap()->CollectAllGarbage();
3654
3655 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
3656 ->cleared());
3657 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
3658 ->cleared());
3659}
3660
3661
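// Walks the relocation info of |code| and returns the first inline cache
// stub of the given kind, or NULL if there is none.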
3662static Code* FindFirstIC(Code* code, Code::Kind kind) {
3663 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3664 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3665 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3666 RelocInfo* info = it.rinfo();
3667 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3668 if (target->is_inline_cache_stub() && target->kind() == kind) {
3669 return target;
3670 }
3671 }
3672 return NULL;
3673}
3674
3675
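// Checks that the (keyed) load IC at |slot_index| in f's feedback vector is
// in |desired_state|.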
3676static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3677 InlineCacheState desired_state) {
3678 Handle<TypeFeedbackVector> vector =
3679 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3680 FeedbackVectorHelper helper(vector);
3681 FeedbackVectorSlot slot = helper.slot(slot_index);
3682 if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
3683 LoadICNexus nexus(vector, slot);
3684 CHECK(nexus.StateFromFeedback() == desired_state);
3685 } else {
3686 CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
3687 KeyedLoadICNexus nexus(vector, slot);
3688 CHECK(nexus.StateFromFeedback() == desired_state);
3689 }
3690}
3691
3692
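// Checks that the load IC at |slot_index| in f's feedback vector is cleared.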
3693static void CheckVectorICCleared(Handle<JSFunction> f, int slot_index) {
3694 Handle<TypeFeedbackVector> vector =
3695 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3696 FeedbackVectorSlot slot(slot_index);
3697 LoadICNexus nexus(vector, slot);
3698 CHECK(IC::IsCleared(&nexus));
3699}
3700
3701
3702TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3703 if (i::FLAG_always_opt) return;
3704 CcTest::InitializeVM();
3705 v8::HandleScope scope(CcTest::isolate());
3706 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3707 // Prepare function f that contains a monomorphic IC for object
3708 // originating from the same native context.
3709 CompileRun(
3710 "function fun() { this.x = 1; };"
3711 "function f(o) { return new o(); } f(fun); f(fun);");
3712 Handle<JSFunction> f = Handle<JSFunction>::cast(
3713 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3714 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3715
3716 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3717 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3718
3719 SimulateIncrementalMarking(CcTest::heap());
3720 CcTest::heap()->CollectAllGarbage();
3721
3722 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3723}
3724
3725
3726TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3727 if (i::FLAG_always_opt) return;
3728 CcTest::InitializeVM();
3729 Isolate* isolate = CcTest::i_isolate();
3730 v8::HandleScope scope(CcTest::isolate());
3731 v8::Local<v8::Value> fun1;
3732 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3733
3734 {
3735 LocalContext env;
3736 CompileRun("function fun() { this.x = 1; };");
3737 fun1 = env->Global()->Get(env.local(), v8_str("fun")).ToLocalChecked();
3738 }
3739
3740 // Prepare function f that contains a monomorphic constructor for object
3741 // originating from a different native context.
3742 CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3743 CompileRun(
3744 "function fun() { this.x = 1; };"
3745 "function f(o) { return new o(); } f(fun1); f(fun1);");
3746 Handle<JSFunction> f = Handle<JSFunction>::cast(
3747 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3748 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3749
3750
3751 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3752 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3753
3754 // Fire context dispose notification.
3755 CcTest::isolate()->ContextDisposedNotification();
3756 SimulateIncrementalMarking(CcTest::heap());
3757 CcTest::heap()->CollectAllGarbage();
3758
3759 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3760 vector->Get(FeedbackVectorSlot(0)));
3761}
3762
3763
3764TEST(IncrementalMarkingPreservesMonomorphicIC) {
3765 if (i::FLAG_always_opt) return;
3766 CcTest::InitializeVM();
3767 v8::HandleScope scope(CcTest::isolate());
3768 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3769 // Prepare function f that contains a monomorphic IC for object
3770 // originating from the same native context.
3771 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3772 "function f(o) { return o.x; } f(obj); f(obj);");
3773 Handle<JSFunction> f = Handle<JSFunction>::cast(
3774 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3775 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3776
3777 CheckVectorIC(f, 0, MONOMORPHIC);
3778
3779 SimulateIncrementalMarking(CcTest::heap());
3780 CcTest::heap()->CollectAllGarbage();
3781
3782 CheckVectorIC(f, 0, MONOMORPHIC);
3783}
3784
3785
3786TEST(IncrementalMarkingClearsMonomorphicIC) {
3787 if (i::FLAG_always_opt) return;
3788 CcTest::InitializeVM();
3789 v8::HandleScope scope(CcTest::isolate());
3790 v8::Local<v8::Value> obj1;
3791 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3792
3793 {
3794 LocalContext env;
3795 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3796 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3797 }
3798
3799 // Prepare function f that contains a monomorphic IC for object
3800 // originating from a different native context.
3801 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3802 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3803 Handle<JSFunction> f = Handle<JSFunction>::cast(
3804 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3805 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3806
3807 CheckVectorIC(f, 0, MONOMORPHIC);
3808
3809 // Fire context dispose notification.
3810 CcTest::isolate()->ContextDisposedNotification();
3811 SimulateIncrementalMarking(CcTest::heap());
3812 CcTest::heap()->CollectAllGarbage();
3813
3814 CheckVectorICCleared(f, 0);
3815}
3816
3817
3818TEST(IncrementalMarkingPreservesPolymorphicIC) {
3819 if (i::FLAG_always_opt) return;
3820 CcTest::InitializeVM();
3821 v8::HandleScope scope(CcTest::isolate());
3822 v8::Local<v8::Value> obj1, obj2;
3823 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3824
3825 {
3826 LocalContext env;
3827 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3828 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3829 }
3830
3831 {
3832 LocalContext env;
3833 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3834 obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3835 }
3836
3837 // Prepare function f that contains a polymorphic IC for objects
3838 // originating from two different native contexts.
3839 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3840 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3841 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3842 Handle<JSFunction> f = Handle<JSFunction>::cast(
3843 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3844 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3845
3846 CheckVectorIC(f, 0, POLYMORPHIC);
3847
3848 // Fire context dispose notification.
3849 SimulateIncrementalMarking(CcTest::heap());
3850 CcTest::heap()->CollectAllGarbage();
3851
3852 CheckVectorIC(f, 0, POLYMORPHIC);
3853}
3854
3855
3856TEST(IncrementalMarkingClearsPolymorphicIC) {
3857 if (i::FLAG_always_opt) return;
3858 CcTest::InitializeVM();
3859 v8::HandleScope scope(CcTest::isolate());
3860 v8::Local<v8::Value> obj1, obj2;
3861 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3862
3863 {
3864 LocalContext env;
3865 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3866 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3867 }
3868
3869 {
3870 LocalContext env;
3871 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3872 obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3873 }
3874
3875 // Prepare function f that contains a polymorphic IC for objects
3876 // originating from two different native contexts.
3877 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3878 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3879 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3880 Handle<JSFunction> f = Handle<JSFunction>::cast(
3881 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3882 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3883
3884 CheckVectorIC(f, 0, POLYMORPHIC);
3885
3886 // Fire context dispose notification.
3887 CcTest::isolate()->ContextDisposedNotification();
3888 SimulateIncrementalMarking(CcTest::heap());
3889 CcTest::heap()->CollectAllGarbage();
3890
3891 CheckVectorICCleared(f, 0);
3892}
3893
3894
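// External one-byte string resource that remembers whether V8 has disposed
// it, so tests can observe when the external source string is released.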
3895class SourceResource : public v8::String::ExternalOneByteStringResource {
3896 public:
3897 explicit SourceResource(const char* data)
3898 : data_(data), length_(strlen(data)) { }
3899
3900 virtual void Dispose() {
3901 i::DeleteArray(data_);
3902 data_ = NULL;
3903 }
3904
3905 const char* data() const { return data_; }
3906
3907 size_t length() const { return length_; }
3908
3909 bool IsDisposed() { return data_ == NULL; }
3910
3911 private:
3912 const char* data_;
3913 size_t length_;
3914};
3915
3916
3917void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3918 const char* accessor) {
3919 // Test that the data retained by the Error.stack accessor is released
3920 // after the first time the accessor is fired. We use an external string
3921 // to check whether the data is being released, since the external string
3922 // resource's callback is fired when the external string is GC'ed.
3923 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3924 v8::HandleScope scope(isolate);
3925 SourceResource* resource = new SourceResource(i::StrDup(source));
3926 {
3927 v8::HandleScope scope(isolate);
3928 v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3929 v8::Local<v8::String> source_string =
3930 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3931 i_isolate->heap()->CollectAllAvailableGarbage();
3932 v8::Script::Compile(ctx, source_string)
3933 .ToLocalChecked()
3934 ->Run(ctx)
3935 .ToLocalChecked();
3936 CHECK(!resource->IsDisposed());
3937 }
3938 // i_isolate->heap()->CollectAllAvailableGarbage();
3939 CHECK(!resource->IsDisposed());
3940
3941 CompileRun(accessor);
3942 i_isolate->heap()->CollectAllAvailableGarbage();
3943
3944 // External source has been released.
3945 CHECK(resource->IsDisposed());
3946 delete resource;
3947}
3948
3949
3950UNINITIALIZED_TEST(ReleaseStackTraceData) {
3951 if (i::FLAG_always_opt) {
3952 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3953 // See: https://codereview.chromium.org/181833004/
3954 return;
3955 }
3956 FLAG_use_ic = false; // ICs retain objects.
3957 FLAG_concurrent_recompilation = false;
3958 v8::Isolate::CreateParams create_params;
3959 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
3960 v8::Isolate* isolate = v8::Isolate::New(create_params);
3961 {
3962 v8::Isolate::Scope isolate_scope(isolate);
3963 v8::HandleScope handle_scope(isolate);
3964 v8::Context::New(isolate)->Enter();
3965 static const char* source1 = "var error = null; "
3966 /* Normal Error */ "try { "
3967 " throw new Error(); "
3968 "} catch (e) { "
3969 " error = e; "
3970 "} ";
3971 static const char* source2 = "var error = null; "
3972 /* Stack overflow */ "try { "
3973 " (function f() { f(); })(); "
3974 "} catch (e) { "
3975 " error = e; "
3976 "} ";
3977 static const char* source3 = "var error = null; "
3978 /* Normal Error */ "try { "
3979 /* as prototype */ " throw new Error(); "
3980 "} catch (e) { "
3981 " error = {}; "
3982 " error.__proto__ = e; "
3983 "} ";
3984 static const char* source4 = "var error = null; "
3985 /* Stack overflow */ "try { "
3986 /* as prototype */ " (function f() { f(); })(); "
3987 "} catch (e) { "
3988 " error = {}; "
3989 " error.__proto__ = e; "
3990 "} ";
3991 static const char* getter = "error.stack";
3992 static const char* setter = "error.stack = 0";
3993
3994 ReleaseStackTraceDataTest(isolate, source1, setter);
3995 ReleaseStackTraceDataTest(isolate, source2, setter);
3996 // We do not test source3 and source4 with setter, since the setter is
3997 // supposed to (untypically) write to the receiver, not the holder. This is
3998 // to emulate the behavior of a data property.
3999
4000 ReleaseStackTraceDataTest(isolate, source1, getter);
4001 ReleaseStackTraceDataTest(isolate, source2, getter);
4002 ReleaseStackTraceDataTest(isolate, source3, getter);
4003 ReleaseStackTraceDataTest(isolate, source4, getter);
4004 }
4005 isolate->Dispose();
4006}
4007
4008
4009TEST(Regress159140) {
4010 i::FLAG_allow_natives_syntax = true;
4011 CcTest::InitializeVM();
4012 Isolate* isolate = CcTest::i_isolate();
4013 LocalContext env;
4014 Heap* heap = isolate->heap();
4015 HandleScope scope(isolate);
4016
4017 // Perform one initial GC to enable code flushing.
4018 heap->CollectAllGarbage();
4019
4020 // Prepare several closures that are all eligible for code flushing
4021 // because all reachable ones are not optimized. Make sure that the
4022 // optimized code object is directly reachable through a handle so
4023 // that it is marked black during incremental marking.
4024 Handle<Code> code;
4025 {
4026 HandleScope inner_scope(isolate);
4027 CompileRun("function h(x) {}"
4028 "function mkClosure() {"
4029 " return function(x) { return x + 1; };"
4030 "}"
4031 "var f = mkClosure();"
4032 "var g = mkClosure();"
4033 "f(1); f(2);"
4034 "g(1); g(2);"
4035 "h(1); h(2);"
4036 "%OptimizeFunctionOnNextCall(f); f(3);"
4037 "%OptimizeFunctionOnNextCall(h); h(3);");
4038
4039 Handle<JSFunction> f = Handle<JSFunction>::cast(
4040 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4041 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4042 CHECK(f->is_compiled());
4043 CompileRun("f = null;");
4044
4045 Handle<JSFunction> g = Handle<JSFunction>::cast(
4046 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4047 CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
4048 CHECK(g->is_compiled());
4049 const int kAgingThreshold = 6;
4050 for (int i = 0; i < kAgingThreshold; i++) {
4051 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4052 }
4053
4054 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4055 }
4056
4057 // Simulate incremental marking so that the functions are enqueued as
4058 // code flushing candidates. Then optimize one function. Finally
4059 // finish the GC to complete code flushing.
4060 SimulateIncrementalMarking(heap);
4061 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4062 heap->CollectAllGarbage();
4063
4064 // Unoptimized code is missing and the deoptimizer will go ballistic.
4065 CompileRun("g('bozo');");
4066}
4067
4068
4069TEST(Regress165495) {
4070 i::FLAG_allow_natives_syntax = true;
4071 CcTest::InitializeVM();
4072 Isolate* isolate = CcTest::i_isolate();
4073 Heap* heap = isolate->heap();
4074 HandleScope scope(isolate);
4075
4076 // Perform one initial GC to enable code flushing.
4077 heap->CollectAllGarbage();
4078
4079 // Prepare an optimized closure that the optimized code map will get
4080 // populated. Then age the unoptimized code to trigger code flushing
4081 // but make sure the optimized code is unreachable.
4082 {
4083 HandleScope inner_scope(isolate);
4084 LocalContext env;
4085 CompileRun("function mkClosure() {"
4086 " return function(x) { return x + 1; };"
4087 "}"
4088 "var f = mkClosure();"
4089 "f(1); f(2);"
4090 "%OptimizeFunctionOnNextCall(f); f(3);");
4091
4092 Handle<JSFunction> f = Handle<JSFunction>::cast(
4093 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4094 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4095 CHECK(f->is_compiled());
4096 const int kAgingThreshold = 6;
4097 for (int i = 0; i < kAgingThreshold; i++) {
4098 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4099 }
4100
4101 CompileRun("f = null;");
4102 }
4103
4104 // Simulate incremental marking so that unoptimized code is flushed
4105 // even though it still is cached in the optimized code map.
4106 SimulateIncrementalMarking(heap);
4107 heap->CollectAllGarbage();
4108
4109 // Make a new closure that will get code installed from the code map.
4110 // Unoptimized code is missing and the deoptimizer will go ballistic.
4111 CompileRun("var g = mkClosure(); g('bozo');");
4112}
4113
4114
4115TEST(Regress169209) {
4116 i::FLAG_stress_compaction = false;
4117 i::FLAG_allow_natives_syntax = true;
4118
4119 CcTest::InitializeVM();
4120 Isolate* isolate = CcTest::i_isolate();
4121 Heap* heap = isolate->heap();
4122 HandleScope scope(isolate);
4123
4124 // Perform one initial GC to enable code flushing.
4125 heap->CollectAllGarbage();
4126
4127 // Prepare a shared function info eligible for code flushing for which
4128 // the unoptimized code will be replaced during optimization.
4129 Handle<SharedFunctionInfo> shared1;
4130 {
4131 HandleScope inner_scope(isolate);
4132 LocalContext env;
4133 CompileRun("function f() { return 'foobar'; }"
4134 "function g(x) { if (x) f(); }"
4135 "f();"
4136 "g(false);"
4137 "g(false);");
4138
4139 Handle<JSFunction> f = Handle<JSFunction>::cast(
4140 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4141 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4142 CHECK(f->is_compiled());
4143 const int kAgingThreshold = 6;
4144 for (int i = 0; i < kAgingThreshold; i++) {
4145 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4146 }
4147
4148 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4149 }
4150
4151 // Prepare a shared function info eligible for code flushing that will
4152 // represent the dangling tail of the candidate list.
4153 Handle<SharedFunctionInfo> shared2;
4154 {
4155 HandleScope inner_scope(isolate);
4156 LocalContext env;
4157 CompileRun("function flushMe() { return 0; }"
4158 "flushMe(1);");
4159
4160 Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4161 *v8::Local<v8::Function>::Cast(CcTest::global()
4162 ->Get(env.local(), v8_str("flushMe"))
4163 .ToLocalChecked())));
4164 CHECK(f->is_compiled());
4165 const int kAgingThreshold = 6;
4166 for (int i = 0; i < kAgingThreshold; i++) {
4167 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4168 }
4169
4170 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4171 }
4172
4173 // Simulate incremental marking and collect code flushing candidates.
4174 SimulateIncrementalMarking(heap);
4175 CHECK(shared1->code()->gc_metadata() != NULL);
4176
4177 // Optimize function and make sure the unoptimized code is replaced.
4178 CompileRun("%OptimizeFunctionOnNextCall(g);"
4179 "g(false);");
4180
4181 // Finish garbage collection cycle.
4182 heap->CollectAllGarbage();
4183 CHECK(shared1->code()->gc_metadata() == NULL);
4184}
4185
4186
4187TEST(Regress169928) {
4188 i::FLAG_allow_natives_syntax = true;
4189 i::FLAG_crankshaft = false;
4190 CcTest::InitializeVM();
4191 Isolate* isolate = CcTest::i_isolate();
4192 LocalContext env;
4193 Factory* factory = isolate->factory();
4194 v8::HandleScope scope(CcTest::isolate());
4195
4196 // Some flags turn Scavenge collections into Mark-sweep collections
4197 // and hence are incompatible with this test case.
4198 if (FLAG_gc_global || FLAG_stress_compaction) return;
4199
4200 // Prepare the environment
4201 CompileRun("function fastliteralcase(literal, value) {"
4202 " literal[0] = value;"
4203 " return literal;"
4204 "}"
4205 "function get_standard_literal() {"
4206 " var literal = [1, 2, 3];"
4207 " return literal;"
4208 "}"
4209 "obj = fastliteralcase(get_standard_literal(), 1);"
4210 "obj = fastliteralcase(get_standard_literal(), 1.5);"
4211 "obj = fastliteralcase(get_standard_literal(), 2);");
4212
4213 // prepare the heap
4214 v8::Local<v8::String> mote_code_string =
4215 v8_str("fastliteralcase(mote, 2.5);");
4216
4217 v8::Local<v8::String> array_name = v8_str("mote");
4218 CHECK(CcTest::global()
4219 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
4220 .FromJust());
4221
4222 // First make sure we flip spaces
4223 CcTest::heap()->CollectGarbage(NEW_SPACE);
4224
4225 // Allocate the object.
4226 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4227 array_data->set(0, Smi::FromInt(1));
4228 array_data->set(1, Smi::FromInt(2));
4229
4230 AllocateAllButNBytes(CcTest::heap()->new_space(),
4231 JSArray::kSize + AllocationMemento::kSize +
4232 kPointerSize);
4233
4234 Handle<JSArray> array =
4235 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4236
4237 CHECK_EQ(Smi::FromInt(2), array->length());
4238 CHECK(array->HasFastSmiOrObjectElements());
4239
4240 // We need a filler the size of an AllocationMemento object, plus an extra
4241 // pointer-sized filler value.
4242 HeapObject* obj = NULL;
4243 AllocationResult allocation =
4244 CcTest::heap()->new_space()->AllocateRawUnaligned(
4245 AllocationMemento::kSize + kPointerSize);
4246 CHECK(allocation.To(&obj));
4247 Address addr_obj = obj->address();
4248 CcTest::heap()->CreateFillerObjectAt(addr_obj,
4249 AllocationMemento::kSize + kPointerSize,
4250 ClearRecordedSlots::kNo);
4251
4252 // Give the array a name, making sure not to allocate strings.
4253 v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
4254 CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());
4255
4256 // This should crash with a protection violation if we are running a build
4257 // with the bug.
4258 AlwaysAllocateScope aa_scope(isolate);
4259 v8::Script::Compile(env.local(), mote_code_string)
4260 .ToLocalChecked()
4261 ->Run(env.local())
4262 .ToLocalChecked();
4263}
4264
4265
4266#ifdef DEBUG
4267TEST(Regress513507) {
4268 i::FLAG_flush_optimized_code_cache = false;
4269 i::FLAG_allow_natives_syntax = true;
4270 i::FLAG_gc_global = true;
4271 CcTest::InitializeVM();
4272 Isolate* isolate = CcTest::i_isolate();
4273 LocalContext env;
4274 Heap* heap = isolate->heap();
4275 HandleScope scope(isolate);
4276
4277 // Prepare function whose optimized code map we can use.
4278 Handle<SharedFunctionInfo> shared;
4279 {
4280 HandleScope inner_scope(isolate);
4281 CompileRun("function f() { return 1 }"
4282 "f(); %OptimizeFunctionOnNextCall(f); f();");
4283
4284 Handle<JSFunction> f = Handle<JSFunction>::cast(
4285 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4286 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4287 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4288 CompileRun("f = null");
4289 }
4290
4291 // Prepare optimized code that we can use.
4292 Handle<Code> code;
4293 {
4294 HandleScope inner_scope(isolate);
4295 CompileRun("function g() { return 2 }"
4296 "g(); %OptimizeFunctionOnNextCall(g); g();");
4297
4298 Handle<JSFunction> g = Handle<JSFunction>::cast(
4299 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4300 CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
4301 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4302 if (!code->is_optimized_code()) return;
4303 }
4304
4305 Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
4306 Handle<LiteralsArray> lit =
4307 LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
4308 Handle<Context> context(isolate->context());
4309
4310 // Add the new code several times to the optimized code map and also set an
4311 // allocation timeout so that expanding the code map will trigger a GC.
4312 heap->set_allocation_timeout(5);
4313 FLAG_gc_interval = 1000;
4314 for (int i = 0; i < 10; ++i) {
4315 BailoutId id = BailoutId(i);
4316 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4317 }
4318}
4319#endif // DEBUG
4320
4321
4322TEST(Regress514122) {
4323 i::FLAG_flush_optimized_code_cache = false;
4324 i::FLAG_allow_natives_syntax = true;
4325 CcTest::InitializeVM();
4326 Isolate* isolate = CcTest::i_isolate();
4327 LocalContext env;
4328 Heap* heap = isolate->heap();
4329 HandleScope scope(isolate);
4330
4331 // Perform one initial GC to enable code flushing.
4332 CcTest::heap()->CollectAllGarbage();
4333
4334 // Prepare function whose optimized code map we can use.
4335 Handle<SharedFunctionInfo> shared;
4336 {
4337 HandleScope inner_scope(isolate);
4338 CompileRun("function f() { return 1 }"
4339 "f(); %OptimizeFunctionOnNextCall(f); f();");
4340
4341 Handle<JSFunction> f = Handle<JSFunction>::cast(
4342 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4343 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4344 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4345 CompileRun("f = null");
4346 }
4347
4348 // Prepare optimized code that we can use.
4349 Handle<Code> code;
4350 {
4351 HandleScope inner_scope(isolate);
4352 CompileRun("function g() { return 2 }"
4353 "g(); %OptimizeFunctionOnNextCall(g); g();");
4354
4355 Handle<JSFunction> g = Handle<JSFunction>::cast(
4356 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4357 CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
4358 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4359 if (!code->is_optimized_code()) return;
4360 }
4361
4362 Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
4363 Handle<LiteralsArray> lit =
4364 LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
4365 Handle<Context> context(isolate->context());
4366
4367 // Add the code several times to the optimized code map.
4368 for (int i = 0; i < 3; ++i) {
4369 HandleScope inner_scope(isolate);
4370 BailoutId id = BailoutId(i);
4371 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4372 }
4373 shared->optimized_code_map()->Print();
4374
4375 // Add the code with a literals array to be evacuated.
4376 Page* evac_page;
4377 {
4378 HandleScope inner_scope(isolate);
4379 AlwaysAllocateScope always_allocate(isolate);
4380 // Make sure literal is placed on an old-space evacuation candidate.
4381 SimulateFullSpace(heap->old_space());
4382
4383 // Make sure the number of literals is > 0.
4384 Handle<LiteralsArray> lit =
4385 LiteralsArray::New(isolate, vector, 23, TENURED);
4386
4387 evac_page = Page::FromAddress(lit->address());
4388 BailoutId id = BailoutId(100);
4389 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4390 }
4391
4392 // Heap is ready; force {evac_page} to become an evacuation candidate and
4393 // simulate incremental marking to enqueue optimized code map.
4394 FLAG_manual_evacuation_candidates_selection = true;
4395 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4396 SimulateIncrementalMarking(heap);
4397
4398 // No matter whether reachable or not, {boomer} is doomed.
4399 Handle<Object> boomer(shared->optimized_code_map(), isolate);
4400
4401 // Add the code several times to the optimized code map. This will leave old
4402 // copies of the optimized code map unreachable but still marked.
4403 for (int i = 3; i < 6; ++i) {
4404 HandleScope inner_scope(isolate);
4405 BailoutId id = BailoutId(i);
4406 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4407 }
4408
4409 // Trigger a GC to flush out the bug.
4410 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
4411 boomer->Print();
4412}
4413
4414
4415TEST(OptimizedCodeMapReuseEntries) {
4416 i::FLAG_flush_optimized_code_cache = false;
4417 i::FLAG_allow_natives_syntax = true;
4418 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
4419 // run this test.
4420 if (i::FLAG_turbo) return;
4421 CcTest::InitializeVM();
4422 v8::Isolate* v8_isolate = CcTest::isolate();
4423 Isolate* isolate = CcTest::i_isolate();
4424 Heap* heap = isolate->heap();
4425 HandleScope scope(isolate);
4426
4427 // Create 3 contexts, allow the 2nd one to be disposed, and verify that
4428 // a 4th context will re-use the weak slots in the optimized code map
4429 // to hold data, rather than expanding the map.
4430 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
4431 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
4432 v8::ScriptCompiler::Source script_source(
4433 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
4434 .ToLocalChecked());
4435 v8::Local<v8::UnboundScript> indep =
4436 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
4437 .ToLocalChecked();
4438 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
4439 // Perform one initial GC to enable code flushing.
4440 heap->CollectAllGarbage();
4441
4442 c1->Enter();
4443 indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
4444 CompileRun(toplevel);
4445
4446 Handle<SharedFunctionInfo> shared;
4447 Handle<JSFunction> foo = Handle<JSFunction>::cast(
4448 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4449 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
4450 CHECK(foo->shared()->is_compiled());
4451 shared = handle(foo->shared());
4452 c1->Exit();
4453
4454 {
4455 HandleScope scope(isolate);
4456 v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
4457 c2->Enter();
4458 indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
4459 CompileRun(toplevel);
4460 c2->Exit();
4461 }
4462
4463 {
4464 HandleScope scope(isolate);
4465 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
4466 c3->Enter();
4467 indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
4468 CompileRun(toplevel);
4469 c3->Exit();
4470
4471 // Now, collect garbage. Context c2 should have no roots to it, and its
4472 // entry in the optimized code map should be free for a new context.
4473 for (int i = 0; i < 4; i++) {
4474 heap->CollectAllGarbage();
4475 }
4476
4477 Handle<FixedArray> optimized_code_map =
4478 handle(shared->optimized_code_map());
4479 // There should be 3 entries in the map.
4480 CHECK_EQ(
4481 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
4482 SharedFunctionInfo::kEntryLength));
4483 // But one of them (formerly for c2) should be cleared.
4484 int cleared_count = 0;
4485 for (int i = SharedFunctionInfo::kEntriesStart;
4486 i < optimized_code_map->length();
4487 i += SharedFunctionInfo::kEntryLength) {
4488 cleared_count +=
4489 WeakCell::cast(
4490 optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
4491 ->cleared()
4492 ? 1
4493 : 0;
4494 }
4495 CHECK_EQ(1, cleared_count);
4496
4497 // Verify that a new context uses the cleared entry rather than creating a
4498 // new optimized code map array.
4500 v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
4501 c4->Enter();
4502 indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
4503 CompileRun(toplevel);
4504 c4->Exit();
4505 CHECK_EQ(*optimized_code_map, shared->optimized_code_map());
4506
4507 // Now each entry is in use.
4508 cleared_count = 0;
4509 for (int i = SharedFunctionInfo::kEntriesStart;
4510 i < optimized_code_map->length();
4511 i += SharedFunctionInfo::kEntryLength) {
4512 cleared_count +=
4513 WeakCell::cast(
4514 optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
4515 ->cleared()
4516 ? 1
4517 : 0;
4518 }
4519 CHECK_EQ(0, cleared_count);
4520 }
4521}
4522
4523
4524TEST(Regress513496) {
4525 i::FLAG_flush_optimized_code_cache = false;
4526 i::FLAG_allow_natives_syntax = true;
4527 CcTest::InitializeVM();
4528 Isolate* isolate = CcTest::i_isolate();
4529 Heap* heap = isolate->heap();
4530 HandleScope scope(isolate);
4531
4532 // Perform one initial GC to enable code flushing.
4533 CcTest::heap()->CollectAllGarbage();
4534
4535 // Prepare an optimized closure containing an inlined function. Then age
4536 // the inlined unoptimized code to trigger code flushing but make sure the
4537 // outer optimized code is kept in the optimized code map.
4538 Handle<SharedFunctionInfo> shared;
4539 {
4540 LocalContext context;
4541 HandleScope inner_scope(isolate);
4542 CompileRun(
4543 "function g(x) { return x + 1 }"
4544 "function mkClosure() {"
4545 " return function(x) { return g(x); };"
4546 "}"
4547 "var f = mkClosure();"
4548 "f(1); f(2);"
4549 "%OptimizeFunctionOnNextCall(f); f(3);");
4550
4551 Handle<JSFunction> g = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4552 *v8::Local<v8::Function>::Cast(CcTest::global()
4553 ->Get(context.local(), v8_str("g"))
4554 .ToLocalChecked())));
4555 CHECK(g->shared()->is_compiled());
4556 const int kAgingThreshold = 6;
4557 for (int i = 0; i < kAgingThreshold; i++) {
4558 g->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4559 }
4560
4561 Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4562 *v8::Local<v8::Function>::Cast(CcTest::global()
4563 ->Get(context.local(), v8_str("f"))
4564 .ToLocalChecked())));
4565 CHECK(f->is_compiled());
4566 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4567 CompileRun("f = null");
4568 }
4569
4570 // Lookup the optimized code and keep it alive.
4571 CodeAndLiterals result = shared->SearchOptimizedCodeMap(
4572 isolate->context()->native_context(), BailoutId::None());
4573 Handle<Code> optimized_code(result.code, isolate);
4574
4575 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
4576 // though the optimized code for 'f' is reachable via the optimized code map.
4577 heap->CollectAllGarbage();
4578
4579 // Make a new closure that will get code installed from the code map.
4580 // Unoptimized code is missing and the deoptimizer will go ballistic.
4581 CompileRun("var h = mkClosure(); h('bozo');");
4582}
4583
4584
4585TEST(LargeObjectSlotRecording) {
4586 FLAG_manual_evacuation_candidates_selection = true;
4587 CcTest::InitializeVM();
4588 Isolate* isolate = CcTest::i_isolate();
4589 Heap* heap = isolate->heap();
4590 HandleScope scope(isolate);
4591
4592 // Create an object on an evacuation candidate.
4593 SimulateFullSpace(heap->old_space());
4594 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4595 Page* evac_page = Page::FromAddress(lit->address());
4596 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4597 FixedArray* old_location = *lit;
4598
4599 // Allocate a large object.
4600 int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
4601 CHECK(size > Page::kMaxRegularHeapObjectSize);
4602 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
4603 CHECK(heap->lo_space()->Contains(*lo));
4604
4605 // Start incremental marking to activate the write barrier.
4606 SimulateIncrementalMarking(heap, false);
4607 heap->incremental_marking()->AdvanceIncrementalMarking(
4608 10000000, IncrementalMarking::IdleStepActions());
4609
4610 // Create references from the large object to the object on the evacuation
4611 // candidate.
4612 const int kStep = size / 10;
4613 for (int i = 0; i < size; i += kStep) {
4614 lo->set(i, *lit);
4615 CHECK(lo->get(i) == old_location);
4616 }
4617
4618 // Move the evacuation candidate object.
4619 CcTest::heap()->CollectAllGarbage();
4620
4621 // Verify that the pointers in the large object got updated.
4622 for (int i = 0; i < size; i += kStep) {
4623 CHECK_EQ(lo->get(i), *lit);
4624 CHECK(lo->get(i) != old_location);
4625 }
4626}
4627
4628
4629class DummyVisitor : public ObjectVisitor {
4630 public:
4631 void VisitPointers(Object** start, Object** end) override {}
4632};
4633
4634
4635TEST(DeferredHandles) {
4636 CcTest::InitializeVM();
4637 Isolate* isolate = CcTest::i_isolate();
4638 Heap* heap = isolate->heap();
4639 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4640 HandleScopeData* data = isolate->handle_scope_data();
4641 Handle<Object> init(heap->empty_string(), isolate);
4642 while (data->next < data->limit) {
4643 Handle<Object> obj(heap->empty_string(), isolate);
4644 }
4645 // An entire block of handles has been filled.
4646 // Next handle would require a new block.
4647 CHECK(data->next == data->limit);
4648
4649 DeferredHandleScope deferred(isolate);
4650 DummyVisitor visitor;
4651 isolate->handle_scope_implementer()->Iterate(&visitor);
4652 delete deferred.Detach();
4653}
4654
4655
4656TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4657 CcTest::InitializeVM();
4658 v8::HandleScope scope(CcTest::isolate());
4659 CompileRun("function f(n) {"
4660 " var a = new Array(n);"
4661 " for (var i = 0; i < n; i += 100) a[i] = i;"
4662 "};"
4663 "f(10 * 1024 * 1024);");
4664 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4665 if (marking->IsStopped()) {
4666 CcTest::heap()->StartIncrementalMarking();
4667 }
4668 // This big step should be sufficient to mark the whole array.
4669 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4670 CHECK(marking->IsComplete() ||
4671 marking->IsReadyToOverApproximateWeakClosure());
4672}
4673
4674
4675TEST(DisableInlineAllocation) {
4676 i::FLAG_allow_natives_syntax = true;
4677 CcTest::InitializeVM();
4678 v8::HandleScope scope(CcTest::isolate());
4679 CompileRun("function test() {"
4680 " var x = [];"
4681 " for (var i = 0; i < 10; i++) {"
4682 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4683 " }"
4684 "}"
4685 "function run() {"
4686 " %OptimizeFunctionOnNextCall(test);"
4687 " test();"
4688 " %DeoptimizeFunction(test);"
4689 "}");
4690
4691 // Warm-up with inline allocation enabled.
4692 CompileRun("test(); test(); run();");
4693
4694 // Run test with inline allocation disabled.
4695 CcTest::heap()->DisableInlineAllocation();
4696 CompileRun("run()");
4697
4698 // Run test with inline allocation re-enabled.
4699 CcTest::heap()->EnableInlineAllocation();
4700 CompileRun("run()");
4701}
4702
4703
4704static int AllocationSitesCount(Heap* heap) {
4705 int count = 0;
4706 for (Object* site = heap->allocation_sites_list();
4707 !(site->IsUndefined());
4708 site = AllocationSite::cast(site)->weak_next()) {
4709 count++;
4710 }
4711 return count;
4712}
4713
4714
4715TEST(EnsureAllocationSiteDependentCodesProcessed) {
4716 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4717 i::FLAG_allow_natives_syntax = true;
4718 CcTest::InitializeVM();
4719 Isolate* isolate = CcTest::i_isolate();
4720 v8::internal::Heap* heap = CcTest::heap();
4721 GlobalHandles* global_handles = isolate->global_handles();
4722
4723 if (!isolate->use_crankshaft()) return;
4724
4725 // The allocation site at the head of the list is ours.
4726 Handle<AllocationSite> site;
4727 {
4728 LocalContext context;
4729 v8::HandleScope scope(context->GetIsolate());
4730
4731 int count = AllocationSitesCount(heap);
4732 CompileRun("var bar = function() { return (new Array()); };"
4733 "var a = bar();"
4734 "bar();"
4735 "bar();");
4736
4737 // One allocation site should have been created.
4738 int new_count = AllocationSitesCount(heap);
4739 CHECK_EQ(new_count, (count + 1));
4740 site = Handle<AllocationSite>::cast(
4741 global_handles->Create(
4742 AllocationSite::cast(heap->allocation_sites_list())));
4743
4744 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4745
4746 CHECK_EQ(DependentCode::kAllocationSiteTransitionChangedGroup,
4747 site->dependent_code()->group());
4748 CHECK_EQ(1, site->dependent_code()->count());
4749 CHECK(site->dependent_code()->object_at(0)->IsWeakCell());
4750 Code* function_bar = Code::cast(
4751 WeakCell::cast(site->dependent_code()->object_at(0))->value());
4752 Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
4753 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4754 CcTest::global()
4755 ->Get(context.local(), v8_str("bar"))
4756 .ToLocalChecked())));
4757 CHECK_EQ(bar_handle->code(), function_bar);
4758 }
4759
4760 // Now make sure that a GC gets rid of the function, even though we
4761 // still have the allocation site alive.
4762 for (int i = 0; i < 4; i++) {
4763 heap->CollectAllGarbage();
4764 }
4765
4766 // The site still exists because of our global handle, but the code is no
4767 // longer referred to by dependent_code().
4768 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
4769 WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
4770}
4771
4772
4773TEST(CellsInOptimizedCodeAreWeak) {
4774 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4775 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4776 i::FLAG_allow_natives_syntax = true;
4777 CcTest::InitializeVM();
4778 Isolate* isolate = CcTest::i_isolate();
4779 v8::internal::Heap* heap = CcTest::heap();
4780
4781 if (!isolate->use_crankshaft()) return;
4782 HandleScope outer_scope(heap->isolate());
4783 Handle<Code> code;
4784 {
4785 LocalContext context;
4786 HandleScope scope(heap->isolate());
4787
4788 CompileRun(
4789 "bar = (function() {"
4790 " function bar() {"
4791 " return foo(1);"
4792 " };"
4793 " var foo = function(x) { with (x) { return 1 + x; } };"
4794 " %NeverOptimizeFunction(foo);"
4795 " bar(foo);"
4796 " bar(foo);"
4797 " bar(foo);"
4798 " %OptimizeFunctionOnNextCall(bar);"
4799 " bar(foo);"
4800 " return bar;})();");
4801
4802 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4803 *v8::Local<v8::Function>::Cast(CcTest::global()
4804 ->Get(context.local(), v8_str("bar"))
4805 .ToLocalChecked())));
4806 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4807 }
4808
4809 // Now make sure that a GC gets rid of the function.
4810 for (int i = 0; i < 4; i++) {
4811 heap->CollectAllGarbage();
4812 }
4813
4814 CHECK(code->marked_for_deoptimization());
4815}
4816
4817
4818TEST(ObjectsInOptimizedCodeAreWeak) {
4819 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4820 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4821 i::FLAG_allow_natives_syntax = true;
4822 CcTest::InitializeVM();
4823 Isolate* isolate = CcTest::i_isolate();
4824 v8::internal::Heap* heap = CcTest::heap();
4825
4826 if (!isolate->use_crankshaft()) return;
4827 HandleScope outer_scope(heap->isolate());
4828 Handle<Code> code;
4829 {
4830 LocalContext context;
4831 HandleScope scope(heap->isolate());
4832
4833 CompileRun(
4834 "function bar() {"
4835 " return foo(1);"
4836 "};"
4837 "function foo(x) { with (x) { return 1 + x; } };"
4838 "%NeverOptimizeFunction(foo);"
4839 "bar();"
4840 "bar();"
4841 "bar();"
4842 "%OptimizeFunctionOnNextCall(bar);"
4843 "bar();");
4844
4845 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4846 *v8::Local<v8::Function>::Cast(CcTest::global()
4847 ->Get(context.local(), v8_str("bar"))
4848 .ToLocalChecked())));
4849 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4850 }
4851
4852 // Now make sure that a GC gets rid of the function.
4853 for (int i = 0; i < 4; i++) {
4854 heap->CollectAllGarbage();
4855 }
4856
4857 CHECK(code->marked_for_deoptimization());
4858}
4859
4860
4861TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4862 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4863 if (!i::FLAG_incremental_marking) return;
4864 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4865 i::FLAG_allow_natives_syntax = true;
4866 i::FLAG_compilation_cache = false;
4867 i::FLAG_retain_maps_for_n_gc = 0;
4868 CcTest::InitializeVM();
4869 Isolate* isolate = CcTest::i_isolate();
4870
4871 // Do not run for no-snap builds.
4872 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
4873
4874 v8::internal::Heap* heap = CcTest::heap();
4875
4876 // Get a clean slate regarding optimized functions on the heap.
4877 i::Deoptimizer::DeoptimizeAll(isolate);
4878 heap->CollectAllGarbage();
4879
4880 if (!isolate->use_crankshaft()) return;
4881 HandleScope outer_scope(heap->isolate());
4882 for (int i = 0; i < 3; i++) {
4883 SimulateIncrementalMarking(heap);
4884 {
4885 LocalContext context;
4886 HandleScope scope(heap->isolate());
4887 EmbeddedVector<char, 256> source;
4888 SNPrintF(source,
4889 "function bar%d() {"
4890 " return foo%d(1);"
4891 "};"
4892 "function foo%d(x) { with (x) { return 1 + x; } };"
4893 "bar%d();"
4894 "bar%d();"
4895 "bar%d();"
4896 "%%OptimizeFunctionOnNextCall(bar%d);"
4897 "bar%d();",
4898 i, i, i, i, i, i, i, i);
4899 CompileRun(source.start());
4900 }
4901 // We have to abort incremental marking here to abandon black pages.
4902 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4903 }
4904 int elements = 0;
4905 if (heap->weak_object_to_code_table()->IsHashTable()) {
4906 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4907 elements = t->NumberOfElements();
4908 }
4909 CHECK_EQ(0, elements);
4910}
4911
4912
4913static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
4914 const char* name) {
4915 EmbeddedVector<char, 256> source;
4916 SNPrintF(source,
4917 "function %s() { return 0; }"
4918 "%s(); %s();"
4919 "%%OptimizeFunctionOnNextCall(%s);"
4920 "%s();", name, name, name, name, name);
4921 CompileRun(source.start());
4922 i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
4923 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4924 CcTest::global()
4925 ->Get(isolate->GetCurrentContext(), v8_str(name))
4926 .ToLocalChecked())));
4927 return fun;
4928}
4929
4930
4931static int GetCodeChainLength(Code* code) {
4932 int result = 0;
4933 while (code->next_code_link()->IsCode()) {
4934 result++;
4935 code = Code::cast(code->next_code_link());
4936 }
4937 return result;
4938}
4939
4940
4941TEST(NextCodeLinkIsWeak) {
4942 i::FLAG_always_opt = false;
4943 i::FLAG_allow_natives_syntax = true;
4944 CcTest::InitializeVM();
4945 Isolate* isolate = CcTest::i_isolate();
4946 v8::internal::Heap* heap = CcTest::heap();
4947
4948 if (!isolate->use_crankshaft()) return;
4949 HandleScope outer_scope(heap->isolate());
4950 Handle<Code> code;
4951 heap->CollectAllAvailableGarbage();
4952 int code_chain_length_before, code_chain_length_after;
4953 {
4954 HandleScope scope(heap->isolate());
4955 Handle<JSFunction> mortal =
4956 OptimizeDummyFunction(CcTest::isolate(), "mortal");
4957 Handle<JSFunction> immortal =
4958 OptimizeDummyFunction(CcTest::isolate(), "immortal");
4959 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4960 code_chain_length_before = GetCodeChainLength(immortal->code());
4961 // Keep the immortal code and let the mortal code die.
4962 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4963 CompileRun("mortal = null; immortal = null;");
4964 }
4965 heap->CollectAllAvailableGarbage();
4966 // Now mortal code should be dead.
4967 code_chain_length_after = GetCodeChainLength(*code);
4968 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
4969}
4970
4971
4972static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4973 i::byte buffer[i::Assembler::kMinimalBufferSize];
4974 MacroAssembler masm(isolate, buffer, sizeof(buffer),
4975 v8::internal::CodeObjectRequired::kYes);
4976 CodeDesc desc;
4977 masm.Push(isolate->factory()->undefined_value());
4978 masm.Drop(1);
4979 masm.GetCode(&desc);
4980 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4981 Handle<Code> code = isolate->factory()->NewCode(
4982 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4983 CHECK(code->IsCode());
4984 return code;
4985}
4986
4987
4988TEST(NextCodeLinkIsWeak2) {
4989 i::FLAG_allow_natives_syntax = true;
4990 CcTest::InitializeVM();
4991 Isolate* isolate = CcTest::i_isolate();
4992 v8::internal::Heap* heap = CcTest::heap();
4993
4994 if (!isolate->use_crankshaft()) return;
4995 HandleScope outer_scope(heap->isolate());
4996 heap->CollectAllAvailableGarbage();
4997 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4998 Handle<Code> new_head;
4999 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5000 {
5001 HandleScope scope(heap->isolate());
5002 Handle<Code> immortal = DummyOptimizedCode(isolate);
5003 Handle<Code> mortal = DummyOptimizedCode(isolate);
5004 mortal->set_next_code_link(*old_head);
5005 immortal->set_next_code_link(*mortal);
5006 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5007 new_head = scope.CloseAndEscape(immortal);
5008 }
5009 heap->CollectAllAvailableGarbage();
5010 // Now mortal code should be dead.
5011 CHECK_EQ(*old_head, new_head->next_code_link());
5012}
5013
5014
5015static bool weak_ic_cleared = false;
5016
5017static void ClearWeakIC(
5018 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5019 printf("ClearWeakIC callback invoked\n");
5020 weak_ic_cleared = true;
5021 data.GetParameter()->Reset();
5022}
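// Note that a weak callback registered with kParameter must Reset() the
// Persistent handle itself, as done above; V8 does not clear the handle
// automatically, and leaving it set would keep a dangling handle around after
// the object is collected.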
5023
5024
5025TEST(WeakFunctionInConstructor) {
5026 if (i::FLAG_always_opt) return;
5027 i::FLAG_stress_compaction = false;
5028 CcTest::InitializeVM();
5029 v8::Isolate* isolate = CcTest::isolate();
5030 LocalContext env;
5031 v8::HandleScope scope(isolate);
5032 CompileRun(
5033 "function createObj(obj) {"
5034 " return new obj();"
5035 "}");
5036 i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
5037 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
5038 CcTest::global()
5039 ->Get(env.local(), v8_str("createObj"))
5040 .ToLocalChecked())));
5041
5042 v8::Persistent<v8::Object> garbage;
5043 {
5044 v8::HandleScope scope(isolate);
5045 const char* source =
5046 " (function() {"
5047 " function hat() { this.x = 5; }"
5048 " createObj(hat);"
5049 " createObj(hat);"
5050 " return hat;"
5051 " })();";
5052 garbage.Reset(isolate, CompileRun(env.local(), source)
5053 .ToLocalChecked()
5054 ->ToObject(env.local())
5055 .ToLocalChecked());
5056 }
5057 weak_ic_cleared = false;
5058 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5059 Heap* heap = CcTest::i_isolate()->heap();
5060 heap->CollectAllGarbage();
5061 CHECK(weak_ic_cleared);
5062
5063 // We've determined the constructor in createObj has had its weak cell
5064 // cleared. Now, verify that one additional call with a new function
5065 // allows monomorphicity.
5066 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5067 createObj->shared()->feedback_vector(), CcTest::i_isolate());
5068 for (int i = 0; i < 20; i++) {
5069 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5070 CHECK(slot_value->IsWeakCell());
5071 if (WeakCell::cast(slot_value)->cleared()) break;
5072 heap->CollectAllGarbage();
5073 }
5074
5075 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5076 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5077 CompileRun(
5078 "function coat() { this.x = 6; }"
5079 "createObj(coat);");
5080 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5081 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5082}
5083
5084
5085// Checks that the object returned by evaluating the source is only weakly held.
5086void CheckWeakness(const char* source) {
5087 i::FLAG_stress_compaction = false;
5088 CcTest::InitializeVM();
5089 v8::Isolate* isolate = CcTest::isolate();
5090 LocalContext env;
5091 v8::HandleScope scope(isolate);
5092 v8::Persistent<v8::Object> garbage;
5093 {
5094 v8::HandleScope scope(isolate);
5095 garbage.Reset(isolate, CompileRun(env.local(), source)
5096 .ToLocalChecked()
5097 ->ToObject(env.local())
5098 .ToLocalChecked());
5099 }
5100 weak_ic_cleared = false;
5101 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5102 Heap* heap = CcTest::i_isolate()->heap();
5103 heap->CollectAllGarbage();
5104 CHECK(weak_ic_cleared);
5105}
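// The harness assumes the Persistent is the last strong reference once the
// inner HandleScope unwinds; after SetWeak, a full GC must therefore invoke
// ClearWeakIC, proving that any ICs compiled from the source hold the
// returned object only weakly.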
5106
5107
5108// Each of the following "weak IC" tests creates an IC that embeds a map with
5109// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5110TEST(WeakMapInMonomorphicLoadIC) {
5111 CheckWeakness("function loadIC(obj) {"
5112 " return obj.name;"
5113 "}"
5114 " (function() {"
5115 " var proto = {'name' : 'weak'};"
5116 " var obj = Object.create(proto);"
5117 " loadIC(obj);"
5118 " loadIC(obj);"
5119 " loadIC(obj);"
5120 " return proto;"
5121 " })();");
5122}
5123
5124
5125TEST(WeakMapInPolymorphicLoadIC) {
5126 CheckWeakness(
5127 "function loadIC(obj) {"
5128 " return obj.name;"
5129 "}"
5130 " (function() {"
5131 " var proto = {'name' : 'weak'};"
5132 " var obj = Object.create(proto);"
5133 " loadIC(obj);"
5134 " loadIC(obj);"
5135 " loadIC(obj);"
5136 " var poly = Object.create(proto);"
5137 " poly.x = true;"
5138 " loadIC(poly);"
5139 " return proto;"
5140 " })();");
5141}
5142
5143
5144TEST(WeakMapInMonomorphicKeyedLoadIC) {
5145 CheckWeakness("function keyedLoadIC(obj, field) {"
5146 " return obj[field];"
5147 "}"
5148 " (function() {"
5149 " var proto = {'name' : 'weak'};"
5150 " var obj = Object.create(proto);"
5151 " keyedLoadIC(obj, 'name');"
5152 " keyedLoadIC(obj, 'name');"
5153 " keyedLoadIC(obj, 'name');"
5154 " return proto;"
5155 " })();");
5156}
5157
5158
5159TEST(WeakMapInPolymorphicKeyedLoadIC) {
5160 CheckWeakness(
5161 "function keyedLoadIC(obj, field) {"
5162 " return obj[field];"
5163 "}"
5164 " (function() {"
5165 " var proto = {'name' : 'weak'};"
5166 " var obj = Object.create(proto);"
5167 " keyedLoadIC(obj, 'name');"
5168 " keyedLoadIC(obj, 'name');"
5169 " keyedLoadIC(obj, 'name');"
5170 " var poly = Object.create(proto);"
5171 " poly.x = true;"
5172 " keyedLoadIC(poly, 'name');"
5173 " return proto;"
5174 " })();");
5175}
5176
5177
5178TEST(WeakMapInMonomorphicStoreIC) {
5179 CheckWeakness("function storeIC(obj, value) {"
5180 " obj.name = value;"
5181 "}"
5182 " (function() {"
5183 " var proto = {'name' : 'weak'};"
5184 " var obj = Object.create(proto);"
5185 " storeIC(obj, 'x');"
5186 " storeIC(obj, 'x');"
5187 " storeIC(obj, 'x');"
5188 " return proto;"
5189 " })();");
5190}
5191
5192
5193TEST(WeakMapInPolymorphicStoreIC) {
5194 CheckWeakness(
5195 "function storeIC(obj, value) {"
5196 " obj.name = value;"
5197 "}"
5198 " (function() {"
5199 " var proto = {'name' : 'weak'};"
5200 " var obj = Object.create(proto);"
5201 " storeIC(obj, 'x');"
5202 " storeIC(obj, 'x');"
5203 " storeIC(obj, 'x');"
5204 " var poly = Object.create(proto);"
5205 " poly.x = true;"
5206 " storeIC(poly, 'x');"
5207 " return proto;"
5208 " })();");
5209}
5210
5211
5212TEST(WeakMapInMonomorphicKeyedStoreIC) {
5213 CheckWeakness("function keyedStoreIC(obj, field, value) {"
5214 " obj[field] = value;"
5215 "}"
5216 " (function() {"
5217 " var proto = {'name' : 'weak'};"
5218 " var obj = Object.create(proto);"
5219 " keyedStoreIC(obj, 'x');"
5220 " keyedStoreIC(obj, 'x');"
5221 " keyedStoreIC(obj, 'x');"
5222 " return proto;"
5223 " })();");
5224}
5225
5226
5227TEST(WeakMapInPolymorphicKeyedStoreIC) {
5228 CheckWeakness(
5229 "function keyedStoreIC(obj, field, value) {"
5230 " obj[field] = value;"
5231 "}"
5232 " (function() {"
5233 " var proto = {'name' : 'weak'};"
5234 " var obj = Object.create(proto);"
5235 " keyedStoreIC(obj, 'x');"
5236 " keyedStoreIC(obj, 'x');"
5237 " keyedStoreIC(obj, 'x');"
5238 " var poly = Object.create(proto);"
5239 " poly.x = true;"
5240 " keyedStoreIC(poly, 'x');"
5241 " return proto;"
5242 " })();");
5243}
5244
5245
5246TEST(WeakMapInMonomorphicCompareNilIC) {
5247 CheckWeakness("function compareNilIC(obj) {"
5248 " return obj == null;"
5249 "}"
5250 " (function() {"
5251 " var proto = {'name' : 'weak'};"
5252 " var obj = Object.create(proto);"
5253 " compareNilIC(obj);"
5254 " compareNilIC(obj);"
5255 " compareNilIC(obj);"
5256 " return proto;"
5257 " })();");
5258}
5259
5260
5261Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5262 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5263 Handle<Object> obj =
5264 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5265 return Handle<JSFunction>::cast(obj);
5266}
5267
5268
5269void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5270 int slot_index, InlineCacheState state) {
5271 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5272 kind == Code::CALL_IC) {
5273 TypeFeedbackVector* vector = shared->feedback_vector();
5274 FeedbackVectorSlot slot(slot_index);
5275 if (kind == Code::LOAD_IC) {
5276 LoadICNexus nexus(vector, slot);
5277 CHECK_EQ(nexus.StateFromFeedback(), state);
5278 } else if (kind == Code::KEYED_LOAD_IC) {
5279 KeyedLoadICNexus nexus(vector, slot);
5280 CHECK_EQ(nexus.StateFromFeedback(), state);
5281 } else if (kind == Code::CALL_IC) {
5282 CallICNexus nexus(vector, slot);
5283 CHECK_EQ(nexus.StateFromFeedback(), state);
5284 }
5285 } else {
5286 Code* ic = FindFirstIC(code, kind);
5287 CHECK(ic->is_inline_cache_stub());
5288 CHECK(ic->ic_state() == state);
5289 }
5290}
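// In this V8 version the load, keyed-load and call ICs keep their state in
// the SharedFunctionInfo's TypeFeedbackVector, so CheckIC reads it through
// the corresponding feedback nexus; the remaining IC kinds still record their
// state in the generated code object, hence the FindFirstIC() fallback above.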
5291
5292
5293TEST(MonomorphicStaysMonomorphicAfterGC) {
5294 if (FLAG_always_opt) return;
5295 CcTest::InitializeVM();
5296 Isolate* isolate = CcTest::i_isolate();
5297 Heap* heap = isolate->heap();
5298 v8::HandleScope scope(CcTest::isolate());
5299 CompileRun(
5300 "function loadIC(obj) {"
5301 " return obj.name;"
5302 "}"
5303 "function testIC() {"
5304 " var proto = {'name' : 'weak'};"
5305 " var obj = Object.create(proto);"
5306 " loadIC(obj);"
5307 " loadIC(obj);"
5308 " loadIC(obj);"
5309 " return proto;"
5310 "};");
5311 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5312 {
5313 v8::HandleScope scope(CcTest::isolate());
5314 CompileRun("(testIC())");
5315 }
5316 heap->CollectAllGarbage();
5317 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5318 {
5319 v8::HandleScope scope(CcTest::isolate());
5320 CompileRun("(testIC())");
5321 }
5322 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5323}
5324
5325
5326TEST(PolymorphicStaysPolymorphicAfterGC) {
5327 if (FLAG_always_opt) return;
5328 CcTest::InitializeVM();
5329 Isolate* isolate = CcTest::i_isolate();
5330 Heap* heap = isolate->heap();
5331 v8::HandleScope scope(CcTest::isolate());
5332 CompileRun(
5333 "function loadIC(obj) {"
5334 " return obj.name;"
5335 "}"
5336 "function testIC() {"
5337 " var proto = {'name' : 'weak'};"
5338 " var obj = Object.create(proto);"
5339 " loadIC(obj);"
5340 " loadIC(obj);"
5341 " loadIC(obj);"
5342 " var poly = Object.create(proto);"
5343 " poly.x = true;"
5344 " loadIC(poly);"
5345 " return proto;"
5346 "};");
5347 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5348 {
5349 v8::HandleScope scope(CcTest::isolate());
5350 CompileRun("(testIC())");
5351 }
5352 heap->CollectAllGarbage();
5353 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5354 {
5355 v8::HandleScope scope(CcTest::isolate());
5356 CompileRun("(testIC())");
5357 }
5358 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5359}
5360
5361
5362TEST(WeakCell) {
5363 CcTest::InitializeVM();
5364 Isolate* isolate = CcTest::i_isolate();
5365 v8::internal::Heap* heap = CcTest::heap();
5366 v8::internal::Factory* factory = isolate->factory();
5367
5368 HandleScope outer_scope(isolate);
5369 Handle<WeakCell> weak_cell1;
5370 {
5371 HandleScope inner_scope(isolate);
5372 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5373 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5374 }
5375
5376 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5377 Handle<WeakCell> weak_cell2;
5378 {
5379 HandleScope inner_scope(isolate);
5380 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5381 }
5382 CHECK(weak_cell1->value()->IsFixedArray());
5383 CHECK_EQ(*survivor, weak_cell2->value());
5384 heap->CollectGarbage(NEW_SPACE);
5385 CHECK(weak_cell1->value()->IsFixedArray());
5386 CHECK_EQ(*survivor, weak_cell2->value());
5387 heap->CollectGarbage(NEW_SPACE);
5388 CHECK(weak_cell1->value()->IsFixedArray());
5389 CHECK_EQ(*survivor, weak_cell2->value());
5390 heap->CollectAllAvailableGarbage();
5391 CHECK(weak_cell1->cleared());
5392 CHECK_EQ(*survivor, weak_cell2->value());
5393}
5394
5395
5396TEST(WeakCellsWithIncrementalMarking) {
5397 CcTest::InitializeVM();
5398 Isolate* isolate = CcTest::i_isolate();
5399 v8::internal::Heap* heap = CcTest::heap();
5400 v8::internal::Factory* factory = isolate->factory();
5401
5402 const int N = 16;
5403 HandleScope outer_scope(isolate);
5404 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5405 Handle<WeakCell> weak_cells[N];
5406
5407 for (int i = 0; i < N; i++) {
5408 HandleScope inner_scope(isolate);
5409 Handle<HeapObject> value =
5410 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5411 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5412 CHECK(weak_cell->value()->IsFixedArray());
5413 IncrementalMarking* marking = heap->incremental_marking();
5414 if (marking->IsStopped()) {
5415 heap->StartIncrementalMarking();
5416 }
5417 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5418 heap->CollectGarbage(NEW_SPACE);
5419 CHECK(weak_cell->value()->IsFixedArray());
5420 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5421 }
5422 // Call collect all twice to make sure that we also cleared
5423 // weak cells that were allocated on black pages.
5424 heap->CollectAllGarbage();
5425 heap->CollectAllGarbage();
5426 CHECK_EQ(*survivor, weak_cells[0]->value());
5427 for (int i = 1; i < N; i++) {
5428 CHECK(weak_cells[i]->cleared());
5429 }
5430}
5431
5432
5433#ifdef DEBUG
5434TEST(AddInstructionChangesNewSpacePromotion) {
5435 i::FLAG_allow_natives_syntax = true;
5436 i::FLAG_expose_gc = true;
5437 i::FLAG_stress_compaction = true;
5438 i::FLAG_gc_interval = 1000;
5439 CcTest::InitializeVM();
5440 if (!i::FLAG_allocation_site_pretenuring) return;
5441 v8::HandleScope scope(CcTest::isolate());
5442 Isolate* isolate = CcTest::i_isolate();
5443 Heap* heap = isolate->heap();
5444 LocalContext env;
5445 CompileRun(
5446 "function add(a, b) {"
5447 " return a + b;"
5448 "}"
5449 "add(1, 2);"
5450 "add(\"a\", \"b\");"
5451 "var oldSpaceObject;"
5452 "gc();"
5453 "function crash(x) {"
5454 " var object = {a: null, b: null};"
5455 " var result = add(1.5, x | 0);"
5456 " object.a = result;"
5457 " oldSpaceObject = object;"
5458 " return object;"
5459 "}"
5460 "crash(1);"
5461 "crash(1);"
5462 "%OptimizeFunctionOnNextCall(crash);"
5463 "crash(1);");
5464
5465 v8::Local<v8::Object> global = CcTest::global();
5466 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
5467 global->Get(env.local(), v8_str("crash")).ToLocalChecked());
5468 v8::Local<v8::Value> args1[] = {v8_num(1)};
5469 heap->DisableInlineAllocation();
5470 heap->set_allocation_timeout(1);
5471 g->Call(env.local(), global, 1, args1).ToLocalChecked();
5472 heap->CollectAllGarbage();
5473}
5474
5475
5476void OnFatalErrorExpectOOM(const char* location, const char* message) {
5477 // Exit with 0 if the location matches our expectation.
5478 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
5479}
5480
5481
5482TEST(CEntryStubOOM) {
5483 i::FLAG_allow_natives_syntax = true;
5484 CcTest::InitializeVM();
5485 v8::HandleScope scope(CcTest::isolate());
5486 CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);
5487
5488 v8::Local<v8::Value> result = CompileRun(
5489 "%SetFlags('--gc-interval=1');"
5490 "var a = [];"
5491 "a.__proto__ = [];"
5492 "a.unshift(1)");
5493
5494 CHECK(result->IsNumber());
5495}
5496
5497#endif // DEBUG
5498
5499
5500static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5501
5502
5503static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5504 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
5505}
5506
5507
5508UNINITIALIZED_TEST(Regress538257) {
5509 i::FLAG_manual_evacuation_candidates_selection = true;
5510 v8::Isolate::CreateParams create_params;
5511 // Set heap limits.
5512 create_params.constraints.set_max_semi_space_size(1 * Page::kPageSize / MB);
5513 create_params.constraints.set_max_old_space_size(6 * Page::kPageSize / MB);
5514 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5515 v8::Isolate* isolate = v8::Isolate::New(create_params);
5516 isolate->Enter();
5517 {
5518 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5519 Heap* heap = i_isolate->heap();
5520 HandleScope handle_scope(i_isolate);
5521 PagedSpace* old_space = heap->old_space();
5522 const int kMaxObjects = 10000;
5523 const int kFixedArrayLen = 512;
5524 Handle<FixedArray> objects[kMaxObjects];
5525 for (int i = 0; (i < kMaxObjects) &&
5526 heap->CanExpandOldGeneration(old_space->AreaSize());
5527 i++) {
5528 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
5529 Page::FromAddress(objects[i]->address())
5530 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
5531 }
5532 SimulateFullSpace(old_space);
5533 heap->CollectGarbage(OLD_SPACE);
5534 // If we get this far, we've successfully aborted compaction. Any further
5535 // allocations might trigger OOM.
5536 }
5537 isolate->Exit();
5538 isolate->Dispose();
5539}
5540
5541
5542TEST(Regress357137) {
5543 CcTest::InitializeVM();
5544 v8::Isolate* isolate = CcTest::isolate();
5545 v8::HandleScope hscope(isolate);
5546 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5547 global->Set(
5548 v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
5549 .ToLocalChecked(),
5550 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5551 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5552 CHECK(!context.IsEmpty());
5553 v8::Context::Scope cscope(context);
5554
5555 v8::Local<v8::Value> result = CompileRun(
5556 "var locals = '';"
5557 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5558 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5559 "interrupt();" // This triggers a fake stack overflow in f.
5560 "f()()");
5561 CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
5562}
5563
5564
5565TEST(Regress507979) {
5566 const int kFixedArrayLen = 10;
5567 CcTest::InitializeVM();
5568 Isolate* isolate = CcTest::i_isolate();
5569 Heap* heap = isolate->heap();
5570 HandleScope handle_scope(isolate);
5571
5572 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5573 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5574 CHECK(heap->InNewSpace(*o1));
5575 CHECK(heap->InNewSpace(*o2));
5576
5577 HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);
5578
5579 // Replace parts of an object placed before a live object with a filler. This
5580 // way the filler object shares the mark bits with the following live object.
5581 o1->Shrink(kFixedArrayLen - 1);
5582
5583 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
5584 // Let's not optimize the loop away.
5585 CHECK(obj->address() != nullptr);
5586 }
5587}
5588
5589
5590UNINITIALIZED_TEST(PromotionQueue) {
5591 i::FLAG_expose_gc = true;
5592 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
5593 i::FLAG_min_semi_space_size = i::FLAG_max_semi_space_size;
5594 v8::Isolate::CreateParams create_params;
5595 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5596 v8::Isolate* isolate = v8::Isolate::New(create_params);
5597 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5598 {
5599 v8::Isolate::Scope isolate_scope(isolate);
5600 v8::HandleScope handle_scope(isolate);
5601 v8::Context::New(isolate)->Enter();
5602 Heap* heap = i_isolate->heap();
5603 NewSpace* new_space = heap->new_space();
5604
5605 // In this test we will try to overwrite the promotion queue which is at the
5606 // end of to-space. To actually make that possible, we need at least two
5607 // semi-space pages and take advantage of fragmentation.
5608 // (1) Use a semi-space consisting of two pages.
5609 // (2) Create a few small long living objects and call the scavenger to
5610 // move them to the other semi-space.
5611 // (3) Create a huge object that fills the remainder of the first semi-space
5612 // page, then create another huge object of the maximum allocatable memory
5613 // size of the second semi-space page.
5614 // (4) Call the scavenger again.
5615 // What will happen is: the scavenger will promote the objects created in
5616 // (2) and will create promotion queue entries at the end of the second
5617 // semi-space page during the next scavenge when it promotes the objects to
5618 // the old generation. The first allocation of (3) will fill up the first
5619 // semi-space page. The second allocation in (3) will not fit into the
5620 // first semi-space page, but it will overwrite the promotion queue entries
5621 // which are in the second semi-space page. If the right guards are in place,
5622 // the promotion queue will be evacuated in that case.
5623
5624
5625 CHECK(new_space->IsAtMaximumCapacity());
5626 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5627
5628 // Call the scavenger twice to get an empty new space.
5629 heap->CollectGarbage(NEW_SPACE);
5630 heap->CollectGarbage(NEW_SPACE);
5631
5632 // First create a few objects which will survive a scavenge, and will get
5633 // promoted to the old generation later on. These objects will create
5634 // promotion queue entries at the end of the second semi-space page.
5635 const int number_handles = 12;
5636 Handle<FixedArray> handles[number_handles];
5637 for (int i = 0; i < number_handles; i++) {
5638 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5639 }
5640
5641 heap->CollectGarbage(NEW_SPACE);
5642 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5643
5644 // Fill up the first semi-space page.
5645 FillUpOnePage(new_space);
5646
5647 // Create a small object to initialize the bump pointer on the second
5648 // semi-space page.
5649 Handle<FixedArray> small =
5650 i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5651 CHECK(heap->InNewSpace(*small));
5652
5653 // Fill up the second semi-space page.
5654 FillUpOnePage(new_space);
5655
5656 // This scavenge will corrupt memory if the promotion queue is not
5657 // evacuated.
5658 heap->CollectGarbage(NEW_SPACE);
5659 }
5660 isolate->Dispose();
5661}
5662
5663
5664TEST(Regress388880) {
5665 i::FLAG_expose_gc = true;
5666 CcTest::InitializeVM();
5667 v8::HandleScope scope(CcTest::isolate());
5668 Isolate* isolate = CcTest::i_isolate();
5669 Factory* factory = isolate->factory();
5670 Heap* heap = isolate->heap();
5671
5672 Handle<Map> map1 = Map::Create(isolate, 1);
5673 Handle<String> name = factory->NewStringFromStaticChars("foo");
5674 name = factory->InternalizeString(name);
5675 Handle<Map> map2 =
5676 Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE,
5677 Representation::Tagged(), OMIT_TRANSITION)
5678 .ToHandleChecked();
5679
5680 int desired_offset = Page::kPageSize - map1->instance_size();
5681
5682 // Allocate padding objects in old pointer space so that the object
5683 // allocated afterwards ends exactly at the end of the page.
5684 SimulateFullSpace(heap->old_space());
5685 int padding_size = desired_offset - Page::kObjectStartOffset;
5686 CreatePadding(heap, padding_size, TENURED);
5687
5688 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5689 o->set_properties(*factory->empty_fixed_array());
5690
5691 // Ensure that the object was allocated where we need it.
5692 Page* page = Page::FromAddress(o->address());
5693 CHECK_EQ(desired_offset, page->Offset(o->address()));
5694
5695 // Now we have an object right at the end of the page.
5696
5697 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5698 // that would otherwise cause a crash.
5699 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5700 marking->Stop();
5701 CcTest::heap()->StartIncrementalMarking();
5702 CHECK(marking->IsMarking());
5703
5704 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5705 // when it calls heap->AdjustLiveBytes(...).
5706 JSObject::MigrateToMap(o, map2);
5707}
5708
5709
5710TEST(Regress3631) {
5711 i::FLAG_expose_gc = true;
5712 CcTest::InitializeVM();
5713 v8::HandleScope scope(CcTest::isolate());
5714 Isolate* isolate = CcTest::i_isolate();
5715 Heap* heap = isolate->heap();
5716 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5717 v8::Local<v8::Value> result = CompileRun(
5718 "var weak_map = new WeakMap();"
5719 "var future_keys = [];"
5720 "for (var i = 0; i < 50; i++) {"
5721 " var key = {'k' : i + 0.1};"
5722 " weak_map.set(key, 1);"
5723 " future_keys.push({'x' : i + 0.2});"
5724 "}"
5725 "weak_map");
5726 if (marking->IsStopped()) {
5727 CcTest::heap()->StartIncrementalMarking();
5728 }
5729 // Incrementally mark the backing store.
5730 Handle<JSReceiver> obj =
5731 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5732 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5733 while (!Marking::IsBlack(
5734 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5735 !marking->IsStopped()) {
5736 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5737 }
5738 // Stash the backing store in a handle.
5739 Handle<Object> save(weak_map->table(), isolate);
5740 // The following line will update the backing store.
5741 CompileRun(
5742 "for (var i = 0; i < 50; i++) {"
5743 " weak_map.set(future_keys[i], i);"
5744 "}");
5745 heap->incremental_marking()->set_should_hurry(true);
5746 heap->CollectGarbage(OLD_SPACE);
5747}
5748
5749
5750TEST(Regress442710) {
5751 CcTest::InitializeVM();
5752 Isolate* isolate = CcTest::i_isolate();
5753 Heap* heap = isolate->heap();
5754 Factory* factory = isolate->factory();
5755
5756 HandleScope sc(isolate);
5757 Handle<JSGlobalObject> global(
5758 CcTest::i_isolate()->context()->global_object());
5759 Handle<JSArray> array = factory->NewJSArray(2);
5760
5761 Handle<String> name = factory->InternalizeUtf8String("testArray");
5762 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5763 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5764 heap->CollectGarbage(OLD_SPACE);
5765}
5766
5767
5768HEAP_TEST(NumberStringCacheSize) {
5769 // Test that the number-string cache has not been resized in the snapshot.
5770 CcTest::InitializeVM();
5771 Isolate* isolate = CcTest::i_isolate();
5772 if (!isolate->snapshot_available()) return;
5773 Heap* heap = isolate->heap();
5774 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5775 heap->number_string_cache()->length());
5776}
5777
5778
5779TEST(Regress3877) {
5780 CcTest::InitializeVM();
5781 Isolate* isolate = CcTest::i_isolate();
5782 Heap* heap = isolate->heap();
5783 Factory* factory = isolate->factory();
5784 HandleScope scope(isolate);
5785 CompileRun("function cls() { this.x = 10; }");
5786 Handle<WeakCell> weak_prototype;
5787 {
5788 HandleScope inner_scope(isolate);
5789 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5790 Handle<JSReceiver> proto =
5791 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5792 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5793 }
5794 CHECK(!weak_prototype->cleared());
5795 CompileRun(
5796 "var a = { };"
5797 "a.x = new cls();"
5798 "cls.prototype = null;");
5799 for (int i = 0; i < 4; i++) {
5800 heap->CollectAllGarbage();
5801 }
5802 // The map of a.x keeps the prototype alive.
5803 CHECK(!weak_prototype->cleared());
5804 // Change the map of a.x and make the previous map garbage collectable.
5805 CompileRun("a.x.__proto__ = {};");
5806 for (int i = 0; i < 4; i++) {
5807 heap->CollectAllGarbage();
5808 }
5809 CHECK(weak_prototype->cleared());
5810}
5811
5812
5813Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5814 HandleScope inner_scope(isolate);
5815 Handle<Map> map = Map::Create(isolate, 1);
5816 v8::Local<v8::Value> result =
5817 CompileRun("(function () { return {x : 10}; })();");
5818 Handle<JSReceiver> proto =
5819 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5820 Map::SetPrototype(map, proto);
5821 heap->AddRetainedMap(map);
5822 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
5823}
5824
5825
5826void CheckMapRetainingFor(int n) {
5827 FLAG_retain_maps_for_n_gc = n;
5828 Isolate* isolate = CcTest::i_isolate();
5829 Heap* heap = isolate->heap();
5830 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5831 CHECK(!weak_cell->cleared());
5832 for (int i = 0; i < n; i++) {
5833 SimulateIncrementalMarking(heap);
5834 heap->CollectGarbage(OLD_SPACE);
5835 }
5836 CHECK(!weak_cell->cleared());
5837 SimulateIncrementalMarking(heap);
5838 heap->CollectGarbage(OLD_SPACE);
5839 CHECK(weak_cell->cleared());
5840}
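// With --retain-maps-for-n-gc=n, a map that has become otherwise unreachable
// is kept alive across n full GCs and cleared only by the next one; the two
// loops above probe exactly that boundary.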
5841
5842
5843TEST(MapRetaining) {
5844 CcTest::InitializeVM();
5845 v8::HandleScope scope(CcTest::isolate());
5846 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5847 CheckMapRetainingFor(0);
5848 CheckMapRetainingFor(1);
5849 CheckMapRetainingFor(7);
5850}
5851
5852
5853TEST(RegressArrayListGC) {
5854 FLAG_retain_maps_for_n_gc = 1;
5855 FLAG_incremental_marking = 0;
5856 FLAG_gc_global = true;
5857 CcTest::InitializeVM();
5858 v8::HandleScope scope(CcTest::isolate());
5859 Isolate* isolate = CcTest::i_isolate();
5860 Heap* heap = isolate->heap();
5861 AddRetainedMap(isolate, heap);
5862 Handle<Map> map = Map::Create(isolate, 1);
5863 heap->CollectGarbage(OLD_SPACE);
5864 // Force GC in old space on next addition of retained map.
5865 Map::WeakCellForMap(map);
5866 SimulateFullSpace(CcTest::heap()->new_space());
5867 for (int i = 0; i < 10; i++) {
5868 heap->AddRetainedMap(map);
5869 }
5870 heap->CollectGarbage(OLD_SPACE);
5871}
5872
5873
5874#ifdef DEBUG
5875TEST(PathTracer) {
5876 CcTest::InitializeVM();
5877 v8::HandleScope scope(CcTest::isolate());
5878
5879 v8::Local<v8::Value> result = CompileRun("'abc'");
5880 Handle<Object> o = v8::Utils::OpenHandle(*result);
5881 CcTest::i_isolate()->heap()->TracePathToObject(*o);
5882}
5883#endif // DEBUG
5884
5885
5886TEST(WritableVsImmortalRoots) {
5887 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5888 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5889 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5890 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5891 // A root value can be writable, immortal, or neither, but not both.
5892 CHECK(!immortal || !writable);
5893 }
5894}
5895
5896
5897static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
5898 int initial_length,
5899 int elements_to_trim) {
5900 v8::HandleScope scope(CcTest::isolate());
5901 Isolate* isolate = CcTest::i_isolate();
5902 Factory* factory = isolate->factory();
5903 Heap* heap = isolate->heap();
5904
5905 Handle<FixedTypedArrayBase> array =
5906 factory->NewFixedTypedArray(initial_length, type, true);
5907 int old_size = array->size();
5908 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
5909 elements_to_trim);
5910
5911 // Check that the free space filler is at the right place and did not smash
5912 // the array header.
5913 CHECK(array->IsFixedArrayBase());
5914 CHECK_EQ(initial_length - elements_to_trim, array->length());
5915 int new_size = array->size();
5916 if (new_size != old_size) {
5917 // Free space filler should be created in this case.
5918 Address next_obj_address = array->address() + array->size();
5919 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
5920 }
5921 heap->CollectAllAvailableGarbage();
5922}
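// Right-trimming shrinks the array in place; whenever the aligned object size
// actually changes, the freed tail must be overwritten with a filler object
// so that heap iteration stays valid. The filler check above is what guards
// against the header-smashing regression exercised below.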
5923
5924
5925TEST(Regress472513) {
5926 CcTest::InitializeVM();
5927 v8::HandleScope scope(CcTest::isolate());
5928
5929 // The combination of type/initial_length/elements_to_trim triggered
5930 // typed array header smashing with free space filler (crbug/472513).
5931
5932 // 64-bit cases.
5933 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
5934 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
5935 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
5936 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
5937 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
5938 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
5939
5940 // 32-bit cases.
5941 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
5942 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
5943 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
5944 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
5945 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
5946}
5947
5948
5949TEST(WeakFixedArray) {
5950 CcTest::InitializeVM();
5951 v8::HandleScope scope(CcTest::isolate());
5952
5953 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
5954 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
5955 array->Remove(number);
5956 array->Compact<WeakFixedArray::NullCallback>();
5957 WeakFixedArray::Add(array, number);
5958}
5959
5960
5961TEST(PreprocessStackTrace) {
5962 // Do not automatically trigger early GC.
5963 FLAG_gc_interval = -1;
5964 CcTest::InitializeVM();
5965 v8::HandleScope scope(CcTest::isolate());
5966 v8::TryCatch try_catch(CcTest::isolate());
5967 CompileRun("throw new Error();");
5968 CHECK(try_catch.HasCaught());
5969 Isolate* isolate = CcTest::i_isolate();
5970 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
5971 Handle<Name> key = isolate->factory()->stack_trace_symbol();
5972 Handle<Object> stack_trace =
5973 Object::GetProperty(exception, key).ToHandleChecked();
5974 Handle<Object> code =
5975 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5976 CHECK(code->IsCode());
5977
5978 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
5979
5980 Handle<Object> pos =
5981 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5982 CHECK(pos->IsSmi());
5983
5984 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
5985 int array_length = Smi::cast(stack_trace_array->length())->value();
5986 for (int i = 0; i < array_length; i++) {
5987 Handle<Object> element =
5988 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
5989 CHECK(!element->IsCode());
5990 }
5991}
5992
5993
5994static bool utils_has_been_collected = false;
5995
5996static void UtilsHasBeenCollected(
5997 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5998 utils_has_been_collected = true;
5999 data.GetParameter()->Reset();
6000}
6001
6002
6003TEST(BootstrappingExports) {
6004 // Expose utils object and delete it to observe that it is indeed
6005 // being garbage-collected.
6006 FLAG_expose_natives_as = "utils";
6007 CcTest::InitializeVM();
6008 v8::Isolate* isolate = CcTest::isolate();
6009 LocalContext env;
6010
6011 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
6012
6013 utils_has_been_collected = false;
6014
6015 v8::Persistent<v8::Object> utils;
6016
6017 {
6018 v8::HandleScope scope(isolate);
6019 v8::Local<v8::String> name = v8_str("utils");
6020 utils.Reset(isolate, CcTest::global()
6021 ->Get(env.local(), name)
6022 .ToLocalChecked()
6023 ->ToObject(env.local())
6024 .ToLocalChecked());
6025 CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
6026 }
6027
6028 utils.SetWeak(&utils, UtilsHasBeenCollected,
6029 v8::WeakCallbackType::kParameter);
6030
6031 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
6032
6033 CHECK(utils_has_been_collected);
6034}
6035
6036
6037TEST(Regress1878) {
6038 FLAG_allow_natives_syntax = true;
6039 CcTest::InitializeVM();
6040 v8::Isolate* isolate = CcTest::isolate();
6041 v8::HandleScope scope(isolate);
6042 v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
6043 CcTest::i_isolate()->internal_array_function());
6044 LocalContext env;
6045 CHECK(CcTest::global()
6046 ->Set(env.local(), v8_str("InternalArray"), constructor)
6047 .FromJust());
6048
6049 v8::TryCatch try_catch(isolate);
6050
6051 CompileRun(
6052 "var a = Array();"
6053 "for (var i = 0; i < 1000; i++) {"
6054 " var ai = new InternalArray(10000);"
6055 " if (%HaveSameMap(ai, a)) throw Error();"
6056 " if (!%HasFastObjectElements(ai)) throw Error();"
6057 "}"
6058 "for (var i = 0; i < 1000; i++) {"
6059 " var ai = new InternalArray(10000);"
6060 " if (%HaveSameMap(ai, a)) throw Error();"
6061 " if (!%HasFastObjectElements(ai)) throw Error();"
6062 "}");
6063
6064 CHECK(!try_catch.HasCaught());
6065}
6066
6067
6068void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6069 CHECK(bytes >= FixedArray::kHeaderSize);
6070 CHECK(bytes % kPointerSize == 0);
6071 Factory* factory = isolate->factory();
6072 HandleScope scope(isolate);
6073 AlwaysAllocateScope always_allocate(isolate);
6074 int elements =
6075 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6076 Handle<FixedArray> array = factory->NewFixedArray(
6077 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6078 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6079 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6080}
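// Worked example, assuming a 64-bit build where kPointerSize is 8 and the
// FixedArray header (map word plus length word) is 16 bytes: bytes == 1024
// gives elements == (1024 - 16) / 8 == 126, and the array's Size() comes out
// at exactly 1024, which the final CHECK_EQ verifies.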
6081
6082
6083TEST(NewSpaceAllocationCounter) {
6084 CcTest::InitializeVM();
6085 v8::HandleScope scope(CcTest::isolate());
6086 Isolate* isolate = CcTest::i_isolate();
6087 Heap* heap = isolate->heap();
6088 size_t counter1 = heap->NewSpaceAllocationCounter();
6089 heap->CollectGarbage(NEW_SPACE);
6090 const size_t kSize = 1024;
6091 AllocateInSpace(isolate, kSize, NEW_SPACE);
6092 size_t counter2 = heap->NewSpaceAllocationCounter();
6093 CHECK_EQ(kSize, counter2 - counter1);
6094 heap->CollectGarbage(NEW_SPACE);
6095 size_t counter3 = heap->NewSpaceAllocationCounter();
6096 CHECK_EQ(0U, counter3 - counter2);
6097 // Test counter overflow.
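  // Assigning -1 to a size_t yields SIZE_MAX by unsigned conversion, so the
  // deltas checked below stay correct modulo 2^N even when the counter wraps.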
6098 size_t max_counter = -1;
6099 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6100 size_t start = heap->NewSpaceAllocationCounter();
6101 for (int i = 0; i < 20; i++) {
6102 AllocateInSpace(isolate, kSize, NEW_SPACE);
6103 size_t counter = heap->NewSpaceAllocationCounter();
6104 CHECK_EQ(kSize, counter - start);
6105 start = counter;
6106 }
6107}
6108
6109
6110TEST(OldSpaceAllocationCounter) {
6111 CcTest::InitializeVM();
6112 v8::HandleScope scope(CcTest::isolate());
6113 Isolate* isolate = CcTest::i_isolate();
6114 Heap* heap = isolate->heap();
6115 size_t counter1 = heap->OldGenerationAllocationCounter();
6116 heap->CollectGarbage(NEW_SPACE);
6117 heap->CollectGarbage(NEW_SPACE);
6118 const size_t kSize = 1024;
6119 AllocateInSpace(isolate, kSize, OLD_SPACE);
6120 size_t counter2 = heap->OldGenerationAllocationCounter();
6121 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6122 CHECK_LE(kSize, counter2 - counter1);
6123 heap->CollectGarbage(NEW_SPACE);
6124 size_t counter3 = heap->OldGenerationAllocationCounter();
6125 CHECK_EQ(0u, counter3 - counter2);
6126 AllocateInSpace(isolate, kSize, OLD_SPACE);
6127 heap->CollectGarbage(OLD_SPACE);
6128 size_t counter4 = heap->OldGenerationAllocationCounter();
6129 CHECK_LE(kSize, counter4 - counter3);
6130 // Test counter overflow.
6131 size_t max_counter = -1;
6132 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6133 size_t start = heap->OldGenerationAllocationCounter();
6134 for (int i = 0; i < 20; i++) {
6135 AllocateInSpace(isolate, kSize, OLD_SPACE);
6136 size_t counter = heap->OldGenerationAllocationCounter();
6137 CHECK_LE(kSize, counter - start);
6138 start = counter;
6139 }
6140}
6141
6142
6143TEST(NewSpaceAllocationThroughput) {
6144 CcTest::InitializeVM();
6145 v8::HandleScope scope(CcTest::isolate());
6146 Isolate* isolate = CcTest::i_isolate();
6147 Heap* heap = isolate->heap();
6148 GCTracer* tracer = heap->tracer();
6149 int time1 = 100;
6150 size_t counter1 = 1000;
6151 tracer->SampleAllocation(time1, counter1, 0);
6152 int time2 = 200;
6153 size_t counter2 = 2000;
6154 tracer->SampleAllocation(time2, counter2, 0);
6155 size_t throughput =
6156 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6157 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6158 int time3 = 1000;
6159 size_t counter3 = 30000;
6160 tracer->SampleAllocation(time3, counter3, 0);
6161 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6162 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6163}
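// With the samples above, the first throughput is (2000 - 1000) / (200 - 100)
// == 10 bytes/ms, and after the third sample (30000 - 1000) / (1000 - 100)
// == 32 bytes/ms by integer division, matching the CHECK_EQs.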
6164
6165
6166TEST(NewSpaceAllocationThroughput2) {
6167 CcTest::InitializeVM();
6168 v8::HandleScope scope(CcTest::isolate());
6169 Isolate* isolate = CcTest::i_isolate();
6170 Heap* heap = isolate->heap();
6171 GCTracer* tracer = heap->tracer();
6172 int time1 = 100;
6173 size_t counter1 = 1000;
6174 tracer->SampleAllocation(time1, counter1, 0);
6175 int time2 = 200;
6176 size_t counter2 = 2000;
6177 tracer->SampleAllocation(time2, counter2, 0);
6178 size_t throughput =
6179 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6180 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6181 int time3 = 1000;
6182 size_t counter3 = 30000;
6183 tracer->SampleAllocation(time3, counter3, 0);
6184 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6185 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6186}


static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  CHECK(message->IsTheHole());
}


TEST(MessageObjectLeak) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(
      v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, CheckLeak));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);

  const char* test =
      "try {"
      "  throw 'message 1';"
      "} catch (e) {"
      "}"
      "check();"
      "L: try {"
      "  throw 'message 2';"
      "} finally {"
      "  break L;"
      "}"
      "check();";
  CompileRun(test);

  const char* flag = "--turbo-filter=*";
  FlagList::SetFlagsFromString(flag, StrLength(flag));
  FLAG_always_opt = true;

  CompileRun(test);
}
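
// The script runs twice: first with default flags, then with
// --turbo-filter=* and FLAG_always_opt set, so the pending-message slot is
// verified to be cleared (via check()) in both unoptimized and optimized
// code.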


static void CheckEqualSharedFunctionInfos(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
  Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
  Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
  Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
  CHECK(fun1->shared() == fun2->shared());
}


static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  fun->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ClearBytecodeArray();  // Bytecode is code too.
  isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
}


TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}

TEST(RemoveCodeFromSharedFunctionInfoButNotFromClosure) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "var g2 = f();"
      "check(g1, g2);"
      "g1();"
      "g2();"
      "remove(g1);"
      "g2();"
      "check(g1, g2);");
}

TEST(OldGenerationAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, 0, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, 0, counter2);
  size_t throughput = static_cast<size_t>(
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, 0, counter3);
  throughput = static_cast<size_t>(
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}


TEST(AllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, counter2);
  size_t throughput = static_cast<size_t>(
      tracer->AllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, counter3);
  // Cast added for consistency with the assignment above; the tracer
  // reports throughput as a double.
  throughput = static_cast<size_t>(
      tracer->AllocationThroughputInBytesPerMillisecond(100));
  CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
}
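
// The factor of two in the expected values above comes from sampling the
// new-space and old-generation counters with identical values: the
// combined allocation throughput is the sum of the two, i.e. twice the
// per-space rate.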


TEST(ContextMeasure) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;

  int size_upper_limit = 0;
  int count_upper_limit = 0;
  HeapIterator it(CcTest::heap());
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    size_upper_limit += obj->Size();
    count_upper_limit++;
  }

  ContextMeasure measure(*isolate->native_context());

  PrintF("Context size : %d bytes\n", measure.Size());
  PrintF("Context object count: %d\n", measure.Count());

  CHECK_LE(1000, measure.Count());
  CHECK_LE(50000, measure.Size());

  CHECK_LE(measure.Count(), count_upper_limit);
  CHECK_LE(measure.Size(), size_upper_limit);
}


TEST(ScriptIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  heap->CollectAllGarbage();

  int script_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (obj->IsScript()) script_count++;
    }
  }

  {
    Script::Iterator iterator(isolate);
    while (iterator.Next()) script_count--;
  }

  CHECK_EQ(0, script_count);
}


TEST(SharedFunctionInfoIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  heap->CollectAllGarbage();
  heap->CollectAllGarbage();

  int sfi_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
      if (!obj->IsSharedFunctionInfo()) continue;
      sfi_count++;
    }
  }

  {
    SharedFunctionInfo::Iterator iterator(isolate);
    while (iterator.Next()) sfi_count--;
  }

  CHECK_EQ(0, sfi_count);
}


template <typename T>
static UniqueId MakeUniqueId(const Persistent<T>& p) {
  return UniqueId(reinterpret_cast<uintptr_t>(*v8::Utils::OpenPersistent(p)));
}


TEST(Regress519319) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  Heap* heap = CcTest::heap();
  LocalContext context;

  v8::Persistent<Value> parent;
  v8::Persistent<Value> child;

  parent.Reset(isolate, v8::Object::New(isolate));
  child.Reset(isolate, v8::Object::New(isolate));

  SimulateFullSpace(heap->old_space());
  heap->CollectGarbage(OLD_SPACE);
  {
    UniqueId id = MakeUniqueId(parent);
    isolate->SetObjectGroupId(parent, id);
    isolate->SetReferenceFromGroup(id, child);
  }
  // The CollectGarbage call above starts sweeper threads.
  // The crash will happen if the following two functions
  // are called before sweeping finishes.
  heap->StartIncrementalMarking();
  heap->FinalizeIncrementalMarkingIfComplete("test");
}


HEAP_TEST(TestMemoryReducerSampleJsCalls) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  MemoryReducer* memory_reducer = heap->memory_reducer_;
  memory_reducer->SampleAndGetJsCallsPerMs(0);
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
  CheckDoubleEquals(3, calls_per_ms);

  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
  CheckDoubleEquals(0, calls_per_ms);

  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  isolate->IncrementJsCallsFromApiCounter();
  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
  CheckDoubleEquals(2, calls_per_ms);
}
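
// Worked numbers for the checks above: three API calls between the samples
// at t == 0 and t == 1 give 3 / (1 - 0) == 3 calls/ms; no calls until
// t == 2 give 0; four calls between t == 2 and t == 4 give
// 4 / (4 - 2) == 2 calls/ms.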

HEAP_TEST(Regress587004) {
  FLAG_concurrent_sweeping = false;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = false;
#endif
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  const int N = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
                kPointerSize;
  Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
  CHECK(heap->old_space()->Contains(*array));
  Handle<Object> number = factory->NewHeapNumber(1.0);
  CHECK(heap->InNewSpace(*number));
  for (int i = 0; i < N; i++) {
    array->set(i, *number);
  }
  heap->CollectGarbage(OLD_SPACE);
  SimulateFullSpace(heap->old_space());
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  ByteArray* byte_array;
  const int M = 256;
  // Don't allow old space expansion. The test works without this flag too,
  // but becomes very slow.
  heap->set_force_oom(true);
  while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
  }
  // Re-enable old space expansion to avoid an OOM crash.
  heap->set_force_oom(false);
  heap->CollectGarbage(NEW_SPACE);
}

HEAP_TEST(Regress589413) {
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  FLAG_concurrent_sweeping = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap into a clean state.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Fill the new space with byte arrays whose elements look like pointers.
  const int M = 256;
  ByteArray* byte_array;
  while (heap->AllocateByteArray(M).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
    // Add the array to the root set.
    handle(byte_array);
  }
  // Make sure the byte arrays will be promoted on the next GC.
  heap->CollectGarbage(NEW_SPACE);
  // This number is close to the large free list category threshold.
  const int N = 0x3eee;
  {
    std::vector<FixedArray*> arrays;
    std::set<Page*> pages;
    FixedArray* array;
    // Fill all pages with fixed arrays.
    heap->set_force_oom(true);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array to the root set.
      handle(array);
    }
    // Expand and fill one complete page with fixed arrays.
    heap->set_force_oom(false);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array to the root set.
      handle(array);
      // Do not expand anymore.
      heap->set_force_oom(true);
    }
    // Expand and mark the new page as an evacuation candidate.
    heap->set_force_oom(false);
    {
      AlwaysAllocateScope always_allocate(isolate);
      Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
      Page* ec_page = Page::FromAddress(ec_obj->address());
      ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
      // Make all arrays point to the evacuation candidate so that slots are
      // recorded for them.
      for (size_t j = 0; j < arrays.size(); j++) {
        array = arrays[j];
        for (int i = 0; i < N; i++) {
          array->set(i, *ec_obj);
        }
      }
    }
    SimulateIncrementalMarking(heap);
    for (size_t j = 0; j < arrays.size(); j++) {
      heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
    }
  }
  // Force allocation from the free list.
  heap->set_force_oom(true);
  heap->CollectGarbage(OLD_SPACE);
}

UNINITIALIZED_TEST(PagePromotion) {
  FLAG_page_promotion = true;
  FLAG_page_promotion_threshold = 0;  // %
  i::FLAG_min_semi_space_size = 8 * (Page::kPageSize / MB);
  // We cannot optimize for size as we require a new space with more than one
  // page.
  i::FLAG_optimize_for_size = false;
  // Set max_semi_space_size because it could've been initialized by an
  // implication of optimize_for_size.
  i::FLAG_max_semi_space_size = i::FLAG_min_semi_space_size;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    std::vector<Handle<FixedArray>> handles;
    SimulateFullSpace(heap->new_space(), &handles);
    heap->CollectGarbage(NEW_SPACE);
    CHECK_GT(handles.size(), 0u);
    // The first object in the handles should be on the first page.
    Handle<FixedArray> first_object = handles.front();
    Page* first_page = Page::FromAddress(first_object->address());
    // The age mark should not be on the first page.
    CHECK(!first_page->ContainsLimit(heap->new_space()->age_mark()));
    // To perform a sanity check on live bytes we need to mark the heap.
    SimulateIncrementalMarking(heap, true);
    // Sanity check that the page meets the requirements for promotion.
    const int threshold_bytes =
        FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
    CHECK_GE(first_page->LiveBytes(), threshold_bytes);

    // Actual checks: the page is in new space first, but is moved to old
    // space during a full GC.
    CHECK(heap->new_space()->ContainsSlow(first_page->address()));
    CHECK(!heap->old_space()->ContainsSlow(first_page->address()));
    heap->CollectGarbage(OLD_SPACE);
    CHECK(!heap->new_space()->ContainsSlow(first_page->address()));
    CHECK(heap->old_space()->ContainsSlow(first_page->address()));
  }
}

TEST(Regress598319) {
  // This test ensures that no white objects can cross the progress bar of
  // large objects during incremental marking. It checks this by calling
  // Shift() on the array while the large object is only partially marked.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  const int kNumberOfObjects = Page::kMaxRegularHeapObjectSize / kPointerSize;

  struct Arr {
    Arr(Isolate* isolate, int number_of_objects) {
      root = isolate->factory()->NewFixedArray(1, TENURED);
      {
        // Temporary scope to avoid getting any other objects into the root
        // set.
        v8::HandleScope scope(CcTest::isolate());
        Handle<FixedArray> tmp =
            isolate->factory()->NewFixedArray(number_of_objects);
        root->set(0, *tmp);
        for (int i = 0; i < get()->length(); i++) {
          tmp = isolate->factory()->NewFixedArray(100, TENURED);
          get()->set(i, *tmp);
        }
      }
    }

    FixedArray* get() { return FixedArray::cast(root->get(0)); }

    Handle<FixedArray> root;
  } arr(isolate, kNumberOfObjects);

  CHECK_EQ(arr.get()->length(), kNumberOfObjects);
  CHECK(heap->lo_space()->Contains(arr.get()));
  LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
  CHECK_NOT_NULL(page);

  // GC to clean up state.
  heap->CollectGarbage(OLD_SPACE);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  CHECK(heap->lo_space()->Contains(arr.get()));
  CHECK(Marking::IsWhite(Marking::MarkBitFrom(arr.get())));
  for (int i = 0; i < arr.get()->length(); i++) {
    CHECK(Marking::IsWhite(
        Marking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
  }

  // Start incremental marking.
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking();
  }
  CHECK(marking->IsMarking());

  // Check that we have not marked the interesting array during root scanning.
  for (int i = 0; i < arr.get()->length(); i++) {
    CHECK(Marking::IsWhite(
        Marking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
  }

  // Now we search for a state where we are in incremental marking and have
  // only partially marked the large object.
  while (!marking->IsComplete()) {
    marking->Step(i::KB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->progress_bar() > 0) {
      CHECK_NE(page->progress_bar(), arr.get()->Size());
      {
        // Shift by 1, effectively moving one white object across the
        // progress bar, meaning that we will miss marking it.
        v8::HandleScope scope(CcTest::isolate());
        Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
            Handle<FixedArray>(arr.get()));
        js_array->GetElementsAccessor()->Shift(js_array);
      }
      break;
    }
  }

  // Finish marking with bigger steps to speed up the test.
  while (!marking->IsComplete()) {
    marking->Step(10 * i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());

  // All objects need to be black after marking. If a white object crossed
  // the progress bar, we would fail here.
  for (int i = 0; i < arr.get()->length(); i++) {
    CHECK(Marking::IsBlack(
        Marking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
  }
}

TEST(Regress609761) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  intptr_t size_before = heap->SizeOfObjects();
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
  array->Shrink(1);
  intptr_t size_after = heap->SizeOfObjects();
  CHECK_EQ(size_after, size_before + array->Size());
}
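
// A note on the equality above: Shrink(1) right-trims the array in place,
// so (presumably) the freed tail becomes filler that SizeOfObjects() does
// not attribute to live objects, and the heap grows only by the shrunken
// array's size.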

}  // namespace internal
}  // namespace v8