// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <stdlib.h>
#include <utility>

#include "src/v8.h"

#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/field-type.h"
#include "src/global-handles.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"

using namespace v8::base;
using namespace v8::internal;

#if V8_DOUBLE_FIELDS_UNBOXING


//
// Helper functions.
//

31static void InitializeVerifiedMapDescriptors(
32 Map* map, DescriptorArray* descriptors,
33 LayoutDescriptor* layout_descriptor) {
34 map->InitializeDescriptors(descriptors, layout_descriptor);
35 CHECK(layout_descriptor->IsConsistentWithMap(map, true));
36}
37
38
39static Handle<String> MakeString(const char* str) {
40 Isolate* isolate = CcTest::i_isolate();
41 Factory* factory = isolate->factory();
42 return factory->InternalizeUtf8String(str);
43}
44
45
46static Handle<String> MakeName(const char* str, int suffix) {
47 EmbeddedVector<char, 128> buffer;
48 SNPrintF(buffer, "%s%d", str, suffix);
49 return MakeString(buffer.start());
50}
51
52
53Handle<JSObject> GetObject(const char* name) {
54 return Handle<JSObject>::cast(
55 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
56 CcTest::global()
57 ->Get(v8::Isolate::GetCurrent()->GetCurrentContext(),
58 v8_str(name))
59 .ToLocalChecked())));
60}
Emily Bernierd0a1eb72015-03-24 16:35:39 -040061
62
63static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
64 if (obj->IsUnboxedDoubleField(field_index)) {
65 return obj->RawFastDoublePropertyAt(field_index);
66 } else {
67 Object* value = obj->RawFastPropertyAt(field_index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000068 CHECK(value->IsMutableHeapNumber());
Emily Bernierd0a1eb72015-03-24 16:35:39 -040069 return HeapNumber::cast(value)->value();
70 }
71}
72
// Bit count used by the fast-layout query tests below.
const int kNumberOfBits = 32;


// Kinds of test properties used to build descriptor arrays. PROP_KIND_NUMBER
// is not a kind itself but the number of real entries in this enum.
enum TestPropertyKind {
  PROP_CONSTANT,
  PROP_SMI,
  PROP_DOUBLE,
  PROP_TAGGED,
  PROP_KIND_NUMBER
};

// Field representation corresponding to each TestPropertyKind above;
// PROP_CONSTANT maps to Representation::None().
static Representation representations[PROP_KIND_NUMBER] = {
    Representation::None(), Representation::Smi(), Representation::Double(),
    Representation::Tagged()};
87
88
89static Handle<DescriptorArray> CreateDescriptorArray(Isolate* isolate,
90 TestPropertyKind* props,
91 int kPropsCount) {
92 Factory* factory = isolate->factory();
93
94 Handle<String> func_name = factory->InternalizeUtf8String("func");
95 Handle<JSFunction> func = factory->NewFunction(func_name);
96
97 Handle<DescriptorArray> descriptors =
98 DescriptorArray::Allocate(isolate, 0, kPropsCount);
99
100 int next_field_offset = 0;
101 for (int i = 0; i < kPropsCount; i++) {
102 EmbeddedVector<char, 64> buffer;
103 SNPrintF(buffer, "prop%d", i);
104 Handle<String> name = factory->InternalizeUtf8String(buffer.start());
105
106 TestPropertyKind kind = props[i];
107
108 if (kind == PROP_CONSTANT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000109 DataConstantDescriptor d(name, func, NONE);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400110 descriptors->Append(&d);
111
112 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000113 DataDescriptor f(name, next_field_offset, NONE, representations[kind]);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400114 next_field_offset += f.GetDetails().field_width_in_words();
115 descriptors->Append(&f);
116 }
117 }
118 return descriptors;
119}
120
121
122TEST(LayoutDescriptorBasicFast) {
123 CcTest::InitializeVM();
124 v8::HandleScope scope(CcTest::isolate());
125
126 LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();
127
128 CHECK(!layout_desc->IsSlowLayout());
129 CHECK(layout_desc->IsFastPointerLayout());
130 CHECK_EQ(kSmiValueSize, layout_desc->capacity());
131
132 for (int i = 0; i < kSmiValueSize + 13; i++) {
133 CHECK_EQ(true, layout_desc->IsTagged(i));
134 }
135 CHECK_EQ(true, layout_desc->IsTagged(-1));
136 CHECK_EQ(true, layout_desc->IsTagged(-12347));
137 CHECK_EQ(true, layout_desc->IsTagged(15635));
138 CHECK(layout_desc->IsFastPointerLayout());
139
140 for (int i = 0; i < kSmiValueSize; i++) {
141 layout_desc = layout_desc->SetTaggedForTesting(i, false);
142 CHECK_EQ(false, layout_desc->IsTagged(i));
143 layout_desc = layout_desc->SetTaggedForTesting(i, true);
144 CHECK_EQ(true, layout_desc->IsTagged(i));
145 }
146 CHECK(layout_desc->IsFastPointerLayout());
147
148 int sequence_length;
149 CHECK_EQ(true, layout_desc->IsTagged(0, std::numeric_limits<int>::max(),
150 &sequence_length));
151 CHECK_EQ(std::numeric_limits<int>::max(), sequence_length);
152
153 CHECK_EQ(true, layout_desc->IsTagged(0, 7, &sequence_length));
154 CHECK_EQ(7, sequence_length);
155}
156
157
158TEST(LayoutDescriptorBasicSlow) {
159 CcTest::InitializeVM();
160 Isolate* isolate = CcTest::i_isolate();
161 v8::HandleScope scope(CcTest::isolate());
162
163 Handle<LayoutDescriptor> layout_descriptor;
164 const int kPropsCount = kSmiValueSize * 3;
165 TestPropertyKind props[kPropsCount];
166 for (int i = 0; i < kPropsCount; i++) {
167 // All properties tagged.
168 props[i] = PROP_TAGGED;
169 }
170
171 {
172 Handle<DescriptorArray> descriptors =
173 CreateDescriptorArray(isolate, props, kPropsCount);
174
175 Handle<Map> map = Map::Create(isolate, kPropsCount);
176
177 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
178 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
179 CHECK_EQ(kSmiValueSize, layout_descriptor->capacity());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000180 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400181 }
182
183 props[0] = PROP_DOUBLE;
184 props[kPropsCount - 1] = PROP_DOUBLE;
185
186 Handle<DescriptorArray> descriptors =
187 CreateDescriptorArray(isolate, props, kPropsCount);
188
189 {
190 int inobject_properties = kPropsCount - 1;
191 Handle<Map> map = Map::Create(isolate, inobject_properties);
192
193 // Should be fast as the only double property is the first one.
194 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
195 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
196 CHECK(!layout_descriptor->IsSlowLayout());
197 CHECK(!layout_descriptor->IsFastPointerLayout());
198
199 CHECK_EQ(false, layout_descriptor->IsTagged(0));
200 for (int i = 1; i < kPropsCount; i++) {
201 CHECK_EQ(true, layout_descriptor->IsTagged(i));
202 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000203 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400204 }
205
206 {
207 int inobject_properties = kPropsCount;
208 Handle<Map> map = Map::Create(isolate, inobject_properties);
209
210 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
211 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
212 CHECK(layout_descriptor->IsSlowLayout());
213 CHECK(!layout_descriptor->IsFastPointerLayout());
214 CHECK(layout_descriptor->capacity() > kSmiValueSize);
215
216 CHECK_EQ(false, layout_descriptor->IsTagged(0));
217 CHECK_EQ(false, layout_descriptor->IsTagged(kPropsCount - 1));
218 for (int i = 1; i < kPropsCount - 1; i++) {
219 CHECK_EQ(true, layout_descriptor->IsTagged(i));
220 }
221
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000222 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400223
224 // Here we have truly slow layout descriptor, so play with the bits.
225 CHECK_EQ(true, layout_descriptor->IsTagged(-1));
226 CHECK_EQ(true, layout_descriptor->IsTagged(-12347));
227 CHECK_EQ(true, layout_descriptor->IsTagged(15635));
228
229 LayoutDescriptor* layout_desc = *layout_descriptor;
230 // Play with the bits but leave it in consistent state with map at the end.
231 for (int i = 1; i < kPropsCount - 1; i++) {
232 layout_desc = layout_desc->SetTaggedForTesting(i, false);
233 CHECK_EQ(false, layout_desc->IsTagged(i));
234 layout_desc = layout_desc->SetTaggedForTesting(i, true);
235 CHECK_EQ(true, layout_desc->IsTagged(i));
236 }
237 CHECK(layout_desc->IsSlowLayout());
238 CHECK(!layout_desc->IsFastPointerLayout());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000239 CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400240 }
241}
242
243
// Builds a layout descriptor of |layout_descriptor_length| bits whose
// tagged/untagged state flips at each index listed in |bit_flip_positions|
// (the list must be strictly increasing and terminated by a sentinel position
// beyond the capacity), then verifies both single-bit IsTagged() queries and
// sequence-length queries capped at |max_sequence_length|.
static void TestLayoutDescriptorQueries(int layout_descriptor_length,
                                        int* bit_flip_positions,
                                        int max_sequence_length) {
  Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::NewForTesting(
      CcTest::i_isolate(), layout_descriptor_length);
  // Use the actual capacity of the allocated descriptor from here on.
  layout_descriptor_length = layout_descriptor->capacity();
  LayoutDescriptor* layout_desc = *layout_descriptor;

  {
    // Fill in the layout descriptor.
    int cur_bit_flip_index = 0;
    bool tagged = true;
    for (int i = 0; i < layout_descriptor_length; i++) {
      if (i == bit_flip_positions[cur_bit_flip_index]) {
        tagged = !tagged;
        ++cur_bit_flip_index;
        CHECK(i < bit_flip_positions[cur_bit_flip_index]);  // check test data
      }
      layout_desc = layout_desc->SetTaggedForTesting(i, tagged);
    }
  }

  // An all-tagged pattern collapses to the canonical fast pointer layout;
  // the per-bit checks below do not apply to it.
  if (layout_desc->IsFastPointerLayout()) {
    return;
  }

  {
    // Check queries.
    int cur_bit_flip_index = 0;
    bool tagged = true;
    for (int i = 0; i < layout_descriptor_length; i++) {
      if (i == bit_flip_positions[cur_bit_flip_index]) {
        tagged = !tagged;
        ++cur_bit_flip_index;
      }
      CHECK_EQ(tagged, layout_desc->IsTagged(i));

      int next_bit_flip_position = bit_flip_positions[cur_bit_flip_index];
      int expected_sequence_length;
      if (next_bit_flip_position < layout_desc->capacity()) {
        expected_sequence_length = next_bit_flip_position - i;
      } else {
        // A tagged run reaching the end reports an "infinite" length; an
        // untagged run ends at the capacity.
        expected_sequence_length = tagged ? std::numeric_limits<int>::max()
                                          : (layout_desc->capacity() - i);
      }
      expected_sequence_length =
          Min(expected_sequence_length, max_sequence_length);
      int sequence_length;
      CHECK_EQ(tagged,
               layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
      CHECK(sequence_length > 0);

      CHECK_EQ(expected_sequence_length, sequence_length);
    }

    // A query just past the end reports tagged with the maximal length.
    int sequence_length;
    CHECK_EQ(true,
             layout_desc->IsTagged(layout_descriptor_length,
                                   max_sequence_length, &sequence_length));
    CHECK_EQ(max_sequence_length, sequence_length);
  }
}
306
307
308static void TestLayoutDescriptorQueriesFast(int max_sequence_length) {
309 {
310 LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();
311 int sequence_length;
312 for (int i = 0; i < kNumberOfBits; i++) {
313 CHECK_EQ(true,
314 layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
315 CHECK(sequence_length > 0);
316 CHECK_EQ(max_sequence_length, sequence_length);
317 }
318 }
319
320 {
321 int bit_flip_positions[] = {1000};
322 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
323 max_sequence_length);
324 }
325
326 {
327 int bit_flip_positions[] = {0, 1000};
328 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
329 max_sequence_length);
330 }
331
332 {
333 int bit_flip_positions[kNumberOfBits + 1];
334 for (int i = 0; i <= kNumberOfBits; i++) {
335 bit_flip_positions[i] = i;
336 }
337 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
338 max_sequence_length);
339 }
340
341 {
342 int bit_flip_positions[] = {3, 7, 8, 10, 15, 21, 30, 1000};
343 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
344 max_sequence_length);
345 }
346
347 {
348 int bit_flip_positions[] = {0, 1, 2, 3, 5, 7, 9,
349 12, 15, 18, 22, 26, 29, 1000};
350 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
351 max_sequence_length);
352 }
353}
354
355
356TEST(LayoutDescriptorQueriesFastLimited7) {
357 CcTest::InitializeVM();
358 v8::HandleScope scope(CcTest::isolate());
359
360 TestLayoutDescriptorQueriesFast(7);
361}
362
363
364TEST(LayoutDescriptorQueriesFastLimited13) {
365 CcTest::InitializeVM();
366 v8::HandleScope scope(CcTest::isolate());
367
368 TestLayoutDescriptorQueriesFast(13);
369}
370
371
372TEST(LayoutDescriptorQueriesFastUnlimited) {
373 CcTest::InitializeVM();
374 v8::HandleScope scope(CcTest::isolate());
375
376 TestLayoutDescriptorQueriesFast(std::numeric_limits<int>::max());
377}
378
379
380static void TestLayoutDescriptorQueriesSlow(int max_sequence_length) {
381 {
382 int bit_flip_positions[] = {10000};
383 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
384 max_sequence_length);
385 }
386
387 {
388 int bit_flip_positions[] = {0, 10000};
389 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
390 max_sequence_length);
391 }
392
393 {
394 int bit_flip_positions[kMaxNumberOfDescriptors + 1];
395 for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
396 bit_flip_positions[i] = i;
397 }
398 bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
399 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
400 max_sequence_length);
401 }
402
403 {
404 int bit_flip_positions[] = {3, 7, 8, 10, 15, 21, 30,
405 37, 54, 80, 99, 383, 10000};
406 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
407 max_sequence_length);
408 }
409
410 {
411 int bit_flip_positions[] = {0, 10, 20, 30, 50, 70, 90,
412 120, 150, 180, 220, 260, 290, 10000};
413 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
414 max_sequence_length);
415 }
416
417 {
418 int bit_flip_positions[kMaxNumberOfDescriptors + 1];
419 int cur = 0;
420 for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
421 bit_flip_positions[i] = cur;
422 cur = (cur + 1) * 2;
423 }
424 CHECK(cur < 10000);
425 bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
426 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
427 max_sequence_length);
428 }
429
430 {
431 int bit_flip_positions[kMaxNumberOfDescriptors + 1];
432 int cur = 3;
433 for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
434 bit_flip_positions[i] = cur;
435 cur = (cur + 1) * 2;
436 }
437 CHECK(cur < 10000);
438 bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
439 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
440 max_sequence_length);
441 }
442}
443
444
445TEST(LayoutDescriptorQueriesSlowLimited7) {
446 CcTest::InitializeVM();
447 v8::HandleScope scope(CcTest::isolate());
448
449 TestLayoutDescriptorQueriesSlow(7);
450}
451
452
453TEST(LayoutDescriptorQueriesSlowLimited13) {
454 CcTest::InitializeVM();
455 v8::HandleScope scope(CcTest::isolate());
456
457 TestLayoutDescriptorQueriesSlow(13);
458}
459
460
461TEST(LayoutDescriptorQueriesSlowLimited42) {
462 CcTest::InitializeVM();
463 v8::HandleScope scope(CcTest::isolate());
464
465 TestLayoutDescriptorQueriesSlow(42);
466}
467
468
469TEST(LayoutDescriptorQueriesSlowUnlimited) {
470 CcTest::InitializeVM();
471 v8::HandleScope scope(CcTest::isolate());
472
473 TestLayoutDescriptorQueriesSlow(std::numeric_limits<int>::max());
474}
475
476
477TEST(LayoutDescriptorCreateNewFast) {
478 CcTest::InitializeVM();
479 Isolate* isolate = CcTest::i_isolate();
480 v8::HandleScope scope(CcTest::isolate());
481
482 Handle<LayoutDescriptor> layout_descriptor;
483 TestPropertyKind props[] = {
484 PROP_CONSTANT,
485 PROP_TAGGED, // field #0
486 PROP_CONSTANT,
487 PROP_DOUBLE, // field #1
488 PROP_CONSTANT,
489 PROP_TAGGED, // field #2
490 PROP_CONSTANT,
491 };
492 const int kPropsCount = arraysize(props);
493
494 Handle<DescriptorArray> descriptors =
495 CreateDescriptorArray(isolate, props, kPropsCount);
496
497 {
498 Handle<Map> map = Map::Create(isolate, 0);
499 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
500 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000501 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400502 }
503
504 {
505 Handle<Map> map = Map::Create(isolate, 1);
506 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
507 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000508 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400509 }
510
511 {
512 Handle<Map> map = Map::Create(isolate, 2);
513 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
514 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
515 CHECK(!layout_descriptor->IsSlowLayout());
516 CHECK_EQ(true, layout_descriptor->IsTagged(0));
517 CHECK_EQ(false, layout_descriptor->IsTagged(1));
518 CHECK_EQ(true, layout_descriptor->IsTagged(2));
519 CHECK_EQ(true, layout_descriptor->IsTagged(125));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000520 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400521 }
522}
523
524
525TEST(LayoutDescriptorCreateNewSlow) {
526 CcTest::InitializeVM();
527 Isolate* isolate = CcTest::i_isolate();
528 v8::HandleScope scope(CcTest::isolate());
529
530 Handle<LayoutDescriptor> layout_descriptor;
531 const int kPropsCount = kSmiValueSize * 3;
532 TestPropertyKind props[kPropsCount];
533 for (int i = 0; i < kPropsCount; i++) {
534 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
535 }
536
537 Handle<DescriptorArray> descriptors =
538 CreateDescriptorArray(isolate, props, kPropsCount);
539
540 {
541 Handle<Map> map = Map::Create(isolate, 0);
542 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
543 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000544 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400545 }
546
547 {
548 Handle<Map> map = Map::Create(isolate, 1);
549 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
550 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000551 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400552 }
553
554 {
555 Handle<Map> map = Map::Create(isolate, 2);
556 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
557 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
558 CHECK(!layout_descriptor->IsSlowLayout());
559 CHECK_EQ(true, layout_descriptor->IsTagged(0));
560 CHECK_EQ(false, layout_descriptor->IsTagged(1));
561 CHECK_EQ(true, layout_descriptor->IsTagged(2));
562 CHECK_EQ(true, layout_descriptor->IsTagged(125));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000563 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400564 }
565
566 {
567 int inobject_properties = kPropsCount / 2;
568 Handle<Map> map = Map::Create(isolate, inobject_properties);
569 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
570 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
571 CHECK(layout_descriptor->IsSlowLayout());
572 for (int i = 0; i < inobject_properties; i++) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000573 // PROP_DOUBLE has index 1 among DATA properties.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400574 const bool tagged = (i % (PROP_KIND_NUMBER - 1)) != 1;
575 CHECK_EQ(tagged, layout_descriptor->IsTagged(i));
576 }
577 // Every property after inobject_properties must be tagged.
578 for (int i = inobject_properties; i < kPropsCount; i++) {
579 CHECK_EQ(true, layout_descriptor->IsTagged(i));
580 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000581 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400582
583 // Now test LayoutDescriptor::cast_gc_safe().
584 Handle<LayoutDescriptor> layout_descriptor_copy =
585 LayoutDescriptor::New(map, descriptors, kPropsCount);
586
587 LayoutDescriptor* layout_desc = *layout_descriptor;
588 CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
589 CHECK_EQ(layout_desc, LayoutDescriptor::cast_gc_safe(layout_desc));
590 CHECK(layout_descriptor->IsFixedTypedArrayBase());
591 // Now make it look like a forwarding pointer to layout_descriptor_copy.
592 MapWord map_word = layout_desc->map_word();
593 CHECK(!map_word.IsForwardingAddress());
594 layout_desc->set_map_word(
595 MapWord::FromForwardingAddress(*layout_descriptor_copy));
596 CHECK(layout_desc->map_word().IsForwardingAddress());
597 CHECK_EQ(*layout_descriptor_copy,
598 LayoutDescriptor::cast_gc_safe(layout_desc));
599
600 // Restore it back.
601 layout_desc->set_map_word(map_word);
602 CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
603 }
604}
605
606
607static Handle<LayoutDescriptor> TestLayoutDescriptorAppend(
608 Isolate* isolate, int inobject_properties, TestPropertyKind* props,
609 int kPropsCount) {
610 Factory* factory = isolate->factory();
611
612 Handle<String> func_name = factory->InternalizeUtf8String("func");
613 Handle<JSFunction> func = factory->NewFunction(func_name);
614
615 Handle<DescriptorArray> descriptors =
616 DescriptorArray::Allocate(isolate, 0, kPropsCount);
617
618 Handle<Map> map = Map::Create(isolate, inobject_properties);
619 map->InitializeDescriptors(*descriptors,
620 LayoutDescriptor::FastPointerLayout());
621
622 int next_field_offset = 0;
623 for (int i = 0; i < kPropsCount; i++) {
624 EmbeddedVector<char, 64> buffer;
625 SNPrintF(buffer, "prop%d", i);
626 Handle<String> name = factory->InternalizeUtf8String(buffer.start());
627
628 Handle<LayoutDescriptor> layout_descriptor;
629 TestPropertyKind kind = props[i];
630 if (kind == PROP_CONSTANT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000631 DataConstantDescriptor d(name, func, NONE);
632 layout_descriptor = LayoutDescriptor::ShareAppend(map, d.GetDetails());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400633 descriptors->Append(&d);
634
635 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000636 DataDescriptor f(name, next_field_offset, NONE, representations[kind]);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400637 int field_width_in_words = f.GetDetails().field_width_in_words();
638 next_field_offset += field_width_in_words;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000639 layout_descriptor = LayoutDescriptor::ShareAppend(map, f.GetDetails());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400640 descriptors->Append(&f);
641
642 int field_index = f.GetDetails().field_index();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000643 bool is_inobject = field_index < map->GetInObjectProperties();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400644 for (int bit = 0; bit < field_width_in_words; bit++) {
645 CHECK_EQ(is_inobject && (kind == PROP_DOUBLE),
646 !layout_descriptor->IsTagged(field_index + bit));
647 }
648 CHECK(layout_descriptor->IsTagged(next_field_offset));
649 }
650 map->InitializeDescriptors(*descriptors, *layout_descriptor);
651 }
652 Handle<LayoutDescriptor> layout_descriptor(map->layout_descriptor(), isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000653 CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400654 return layout_descriptor;
655}
656
657
658TEST(LayoutDescriptorAppend) {
659 CcTest::InitializeVM();
660 Isolate* isolate = CcTest::i_isolate();
661 v8::HandleScope scope(CcTest::isolate());
662
663 Handle<LayoutDescriptor> layout_descriptor;
664 const int kPropsCount = kSmiValueSize * 3;
665 TestPropertyKind props[kPropsCount];
666 for (int i = 0; i < kPropsCount; i++) {
667 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
668 }
669
670 layout_descriptor =
671 TestLayoutDescriptorAppend(isolate, 0, props, kPropsCount);
672 CHECK(!layout_descriptor->IsSlowLayout());
673
674 layout_descriptor =
675 TestLayoutDescriptorAppend(isolate, 13, props, kPropsCount);
676 CHECK(!layout_descriptor->IsSlowLayout());
677
678 layout_descriptor =
679 TestLayoutDescriptorAppend(isolate, kSmiValueSize, props, kPropsCount);
680 CHECK(!layout_descriptor->IsSlowLayout());
681
682 layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize * 2,
683 props, kPropsCount);
684 CHECK(layout_descriptor->IsSlowLayout());
685
686 layout_descriptor =
687 TestLayoutDescriptorAppend(isolate, kPropsCount, props, kPropsCount);
688 CHECK(layout_descriptor->IsSlowLayout());
689}
690
691
692TEST(LayoutDescriptorAppendAllDoubles) {
693 CcTest::InitializeVM();
694 Isolate* isolate = CcTest::i_isolate();
695 v8::HandleScope scope(CcTest::isolate());
696
697 Handle<LayoutDescriptor> layout_descriptor;
698 const int kPropsCount = kSmiValueSize * 3;
699 TestPropertyKind props[kPropsCount];
700 for (int i = 0; i < kPropsCount; i++) {
701 props[i] = PROP_DOUBLE;
702 }
703
704 layout_descriptor =
705 TestLayoutDescriptorAppend(isolate, 0, props, kPropsCount);
706 CHECK(!layout_descriptor->IsSlowLayout());
707
708 layout_descriptor =
709 TestLayoutDescriptorAppend(isolate, 13, props, kPropsCount);
710 CHECK(!layout_descriptor->IsSlowLayout());
711
712 layout_descriptor =
713 TestLayoutDescriptorAppend(isolate, kSmiValueSize, props, kPropsCount);
714 CHECK(!layout_descriptor->IsSlowLayout());
715
716 layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize + 1,
717 props, kPropsCount);
718 CHECK(layout_descriptor->IsSlowLayout());
719
720 layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize * 2,
721 props, kPropsCount);
722 CHECK(layout_descriptor->IsSlowLayout());
723
724 layout_descriptor =
725 TestLayoutDescriptorAppend(isolate, kPropsCount, props, kPropsCount);
726 CHECK(layout_descriptor->IsSlowLayout());
727
728 {
729 // Ensure layout descriptor switches into slow mode at the right moment.
730 layout_descriptor =
731 TestLayoutDescriptorAppend(isolate, kPropsCount, props, kSmiValueSize);
732 CHECK(!layout_descriptor->IsSlowLayout());
733
734 layout_descriptor = TestLayoutDescriptorAppend(isolate, kPropsCount, props,
735 kSmiValueSize + 1);
736 CHECK(layout_descriptor->IsSlowLayout());
737 }
738}
739
740
741static Handle<LayoutDescriptor> TestLayoutDescriptorAppendIfFastOrUseFull(
742 Isolate* isolate, int inobject_properties,
743 Handle<DescriptorArray> descriptors, int number_of_descriptors) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000744 Handle<Map> initial_map = Map::Create(isolate, inobject_properties);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400745
746 Handle<LayoutDescriptor> full_layout_descriptor = LayoutDescriptor::New(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000747 initial_map, descriptors, descriptors->number_of_descriptors());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400748
749 int nof = 0;
750 bool switched_to_slow_mode = false;
751
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000752 // This method calls LayoutDescriptor::AppendIfFastOrUseFull() internally
753 // and does all the required map-descriptors related book keeping.
754 Handle<Map> last_map = Map::AddMissingTransitionsForTesting(
755 initial_map, descriptors, full_layout_descriptor);
756
757 // Follow back pointers to construct a sequence of maps from |map|
758 // to |last_map|.
759 int descriptors_length = descriptors->number_of_descriptors();
760 std::vector<Handle<Map>> maps(descriptors_length);
761 {
762 CHECK(last_map->is_stable());
763 Map* map = *last_map;
764 for (int i = 0; i < descriptors_length; i++) {
765 maps[descriptors_length - 1 - i] = handle(map, isolate);
766 Object* maybe_map = map->GetBackPointer();
767 CHECK(maybe_map->IsMap());
768 map = Map::cast(maybe_map);
769 CHECK(!map->is_stable());
770 }
771 CHECK_EQ(1, maps[0]->NumberOfOwnDescriptors());
772 }
773
774 Handle<Map> map;
775 // Now check layout descriptors of all intermediate maps.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400776 for (int i = 0; i < number_of_descriptors; i++) {
777 PropertyDetails details = descriptors->GetDetails(i);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000778 map = maps[i];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400779 LayoutDescriptor* layout_desc = map->layout_descriptor();
780
781 if (layout_desc->IsSlowLayout()) {
782 switched_to_slow_mode = true;
783 CHECK_EQ(*full_layout_descriptor, layout_desc);
784 } else {
785 CHECK(!switched_to_slow_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000786 if (details.type() == DATA) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400787 nof++;
788 int field_index = details.field_index();
789 int field_width_in_words = details.field_width_in_words();
790
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000791 bool is_inobject = field_index < map->GetInObjectProperties();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400792 for (int bit = 0; bit < field_width_in_words; bit++) {
793 CHECK_EQ(is_inobject && details.representation().IsDouble(),
794 !layout_desc->IsTagged(field_index + bit));
795 }
796 CHECK(layout_desc->IsTagged(field_index + field_width_in_words));
797 }
798 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000799 CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400800 }
801
802 Handle<LayoutDescriptor> layout_descriptor(map->GetLayoutDescriptor(),
803 isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000804 CHECK(layout_descriptor->IsConsistentWithMap(*map));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400805 return layout_descriptor;
806}
807
808
809TEST(LayoutDescriptorAppendIfFastOrUseFull) {
810 CcTest::InitializeVM();
811 Isolate* isolate = CcTest::i_isolate();
812 v8::HandleScope scope(CcTest::isolate());
813
814 Handle<LayoutDescriptor> layout_descriptor;
815 const int kPropsCount = kSmiValueSize * 3;
816 TestPropertyKind props[kPropsCount];
817 for (int i = 0; i < kPropsCount; i++) {
818 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
819 }
820 Handle<DescriptorArray> descriptors =
821 CreateDescriptorArray(isolate, props, kPropsCount);
822
823 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
824 isolate, 0, descriptors, kPropsCount);
825 CHECK(!layout_descriptor->IsSlowLayout());
826
827 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
828 isolate, 13, descriptors, kPropsCount);
829 CHECK(!layout_descriptor->IsSlowLayout());
830
831 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
832 isolate, kSmiValueSize, descriptors, kPropsCount);
833 CHECK(!layout_descriptor->IsSlowLayout());
834
835 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
836 isolate, kSmiValueSize * 2, descriptors, kPropsCount);
837 CHECK(layout_descriptor->IsSlowLayout());
838
839 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
840 isolate, kPropsCount, descriptors, kPropsCount);
841 CHECK(layout_descriptor->IsSlowLayout());
842}
843
844
845TEST(LayoutDescriptorAppendIfFastOrUseFullAllDoubles) {
846 CcTest::InitializeVM();
847 Isolate* isolate = CcTest::i_isolate();
848 v8::HandleScope scope(CcTest::isolate());
849
850 Handle<LayoutDescriptor> layout_descriptor;
851 const int kPropsCount = kSmiValueSize * 3;
852 TestPropertyKind props[kPropsCount];
853 for (int i = 0; i < kPropsCount; i++) {
854 props[i] = PROP_DOUBLE;
855 }
856 Handle<DescriptorArray> descriptors =
857 CreateDescriptorArray(isolate, props, kPropsCount);
858
859 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
860 isolate, 0, descriptors, kPropsCount);
861 CHECK(!layout_descriptor->IsSlowLayout());
862
863 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
864 isolate, 13, descriptors, kPropsCount);
865 CHECK(!layout_descriptor->IsSlowLayout());
866
867 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
868 isolate, kSmiValueSize, descriptors, kPropsCount);
869 CHECK(!layout_descriptor->IsSlowLayout());
870
871 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
872 isolate, kSmiValueSize + 1, descriptors, kPropsCount);
873 CHECK(layout_descriptor->IsSlowLayout());
874
875 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
876 isolate, kSmiValueSize * 2, descriptors, kPropsCount);
877 CHECK(layout_descriptor->IsSlowLayout());
878
879 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
880 isolate, kPropsCount, descriptors, kPropsCount);
881 CHECK(layout_descriptor->IsSlowLayout());
882
883 {
884 // Ensure layout descriptor switches into slow mode at the right moment.
885 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
886 isolate, kPropsCount, descriptors, kSmiValueSize);
887 CHECK(!layout_descriptor->IsSlowLayout());
888
889 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
890 isolate, kPropsCount, descriptors, kSmiValueSize + 1);
891 CHECK(layout_descriptor->IsSlowLayout());
892 }
893}
894
895
// Regression test for crbug.com/436816: an object whose unboxed double fields
// hold bit patterns that pass the heap-object tag check must survive map
// normalization (MigrateToMap) and a subsequent full GC without the collector
// interpreting those doubles as pointers.
TEST(Regress436816) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Create a map where every in-object property is an unboxed double.
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = PROP_DOUBLE;
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  Handle<Map> map = Map::Create(isolate, kPropsCount);
  Handle<LayoutDescriptor> layout_descriptor =
      LayoutDescriptor::New(map, descriptors, kPropsCount);
  map->InitializeDescriptors(*descriptors, *layout_descriptor);

  Handle<JSObject> object = factory->NewJSObjectFromMap(map, TENURED);

  // An address that satisfies the heap-object tag check but does not point
  // at any real object.
  Address fake_address = reinterpret_cast<Address>(~kHeapObjectTagMask);
  HeapObject* fake_object = HeapObject::FromAddress(fake_address);
  CHECK(fake_object->IsHeapObject());

  // Fill every double field with the pointer-looking bit pattern.
  double boom_value = bit_cast<double>(fake_object);
  for (int i = 0; i < kPropsCount; i++) {
    FieldIndex index = FieldIndex::ForDescriptor(*map, i);
    CHECK(map->IsUnboxedDoubleField(index));
    object->RawFastDoublePropertyAtPut(index, boom_value);
  }
  CHECK(object->HasFastProperties());
  CHECK(!object->map()->HasFastPointerLayout());

  // After normalization the object no longer has fast properties and its map
  // has fast pointer layout again (all fields are tagged).
  Handle<Map> normalized_map =
      Map::Normalize(map, KEEP_INOBJECT_PROPERTIES, "testing");
  JSObject::MigrateToMap(object, normalized_map);
  CHECK(!object->HasFastProperties());
  CHECK(object->map()->HasFastPointerLayout());

  // Trigger GCs and heap verification.
  CcTest::heap()->CollectAllGarbage();
}
939
940
// Checks that a full GC trims the unused tail of a descriptor array that was
// left over from transition-tree sharing, and trims the map's slow layout
// descriptor back in sync with it.
TEST(DescriptorArrayTrimming) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();

  const int kFieldCount = 128;
  const int kSplitFieldIndex = 32;
  const int kTrimmedLayoutDescriptorLength = 64;

  Handle<FieldType> any_type = FieldType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, kFieldCount);
  for (int i = 0; i < kSplitFieldIndex; i++) {
    map = Map::CopyWithField(map, MakeName("prop", i), any_type, NONE,
                             Representation::Smi(),
                             INSERT_TRANSITION).ToHandleChecked();
  }
  map = Map::CopyWithField(map, MakeName("dbl", kSplitFieldIndex), any_type,
                           NONE, Representation::Double(),
                           INSERT_TRANSITION).ToHandleChecked();
  CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
  CHECK(map->layout_descriptor()->IsSlowLayout());
  CHECK(map->owns_descriptors());
  CHECK_EQ(2, map->layout_descriptor()->length());

  {
    // Add transitions to double fields.
    v8::HandleScope scope(CcTest::isolate());

    Handle<Map> tmp_map = map;
    for (int i = kSplitFieldIndex + 1; i < kFieldCount; i++) {
      tmp_map = Map::CopyWithField(tmp_map, MakeName("dbl", i), any_type, NONE,
                                   Representation::Double(),
                                   INSERT_TRANSITION).ToHandleChecked();
      CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
    }
    // Check that descriptors are shared.
    CHECK(tmp_map->owns_descriptors());
    CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
    CHECK_EQ(map->layout_descriptor(), tmp_map->layout_descriptor());
  }
  CHECK(map->layout_descriptor()->IsSlowLayout());
  CHECK_EQ(4, map->layout_descriptor()->length());

  // The unused tail of the layout descriptor is now "dirty" because of
  // sharing: the tail entries describe fields that belong only to the
  // (now-dead) descendant maps.
  CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
  for (int i = kSplitFieldIndex + 1; i < kTrimmedLayoutDescriptorLength; i++) {
    CHECK(!map->layout_descriptor()->IsTagged(i));
  }
  CHECK_LT(map->NumberOfOwnDescriptors(),
           map->instance_descriptors()->number_of_descriptors());

  // Call GC that should trim both |map|'s descriptor array and layout
  // descriptor.
  CcTest::heap()->CollectAllGarbage();

  // The unused tail of the layout descriptor is now "clean" again.
  CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
  CHECK(map->owns_descriptors());
  CHECK_EQ(map->NumberOfOwnDescriptors(),
           map->instance_descriptors()->number_of_descriptors());
  CHECK(map->layout_descriptor()->IsSlowLayout());
  CHECK_EQ(2, map->layout_descriptor()->length());

  {
    // Add transitions to tagged fields.
    v8::HandleScope scope(CcTest::isolate());

    Handle<Map> tmp_map = map;
    for (int i = kSplitFieldIndex + 1; i < kFieldCount - 1; i++) {
      tmp_map = Map::CopyWithField(tmp_map, MakeName("tagged", i), any_type,
                                   NONE, Representation::Tagged(),
                                   INSERT_TRANSITION).ToHandleChecked();
      CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
    }
    tmp_map = Map::CopyWithField(tmp_map, MakeString("dbl"), any_type, NONE,
                                 Representation::Double(),
                                 INSERT_TRANSITION).ToHandleChecked();
    CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
    // Check that descriptors are shared.
    CHECK(tmp_map->owns_descriptors());
    CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
  }
  CHECK(map->layout_descriptor()->IsSlowLayout());
}
1025
1026
// Checks that the scavenger does not misinterpret an unboxed double field
// whose bit pattern happens to look like a from-space pointer.
TEST(DoScavenge) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // The plan: create |obj| with a double field in new space, do a scavenge so
  // that |obj| is moved to old space, then construct a double value that
  // looks like a pointer into the "from" semi-space. Do one more scavenge and
  // ensure that it neither crashed nor corrupted the double value stored in
  // the object.

  Handle<FieldType> any_type = FieldType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, 10);
  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
                           Representation::Double(),
                           INSERT_TRANSITION).ToHandleChecked();

  // Create object in new space.
  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);

  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
  obj->WriteToField(0, *heap_number);

  {
    // Ensure the object is properly set up.
    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));
  }
  CHECK(isolate->heap()->new_space()->Contains(*obj));

  // Do scavenge so that |obj| is moved to survivor space.
  CcTest::heap()->CollectGarbage(i::NEW_SPACE);

  // Create temp object in the new space.
  Handle<JSArray> temp = factory->NewJSArray(0, FAST_ELEMENTS);
  CHECK(isolate->heap()->new_space()->Contains(*temp));

  // Construct a double value that looks like a pointer to the new space object
  // and store it into the obj.
  Address fake_object = reinterpret_cast<Address>(*temp) + kPointerSize;
  double boom_value = bit_cast<double>(fake_object);

  FieldIndex field_index = FieldIndex::ForDescriptor(obj->map(), 0);
  Handle<HeapNumber> boom_number = factory->NewHeapNumber(boom_value, MUTABLE);
  obj->FastPropertyAtPut(field_index, *boom_number);

  // Now |obj| moves to old gen and it has a double field that looks like
  // a pointer to a from semi-space.
  CcTest::heap()->CollectGarbage(i::NEW_SPACE, "boom");

  CHECK(isolate->heap()->old_space()->Contains(*obj));

  // The double must come through the promotion bit-identically.
  CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
}
1083
1084
// Checks that the write barrier taken during scavenge-time promotion records
// only the tagged slot of a mixed double/tagged object, so that evacuating
// |obj_value|'s page later updates the tagged slot and leaves the unboxed
// double untouched.
TEST(DoScavengeWithIncrementalWriteBarrier) {
  if (FLAG_never_compact || !FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj_value| in old space and ensure that it is allocated
  // on evacuation candidate page, create |obj| with double and tagged fields
  // in new space and write |obj_value| to tagged field of |obj|, do two
  // scavenges to promote |obj| to old space, a GC in old space and ensure that
  // the tagged value was properly updated after candidates evacuation.

  Handle<FieldType> any_type = FieldType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, 10);
  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
                           Representation::Double(),
                           INSERT_TRANSITION).ToHandleChecked();
  map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
                           Representation::Tagged(),
                           INSERT_TRANSITION).ToHandleChecked();

  // Create |obj_value| in old space.
  Handle<HeapObject> obj_value;
  Page* ec_page;
  {
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure |obj_value| is placed on an old-space evacuation candidate.
    heap::SimulateFullSpace(old_space);
    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
    ec_page = Page::FromAddress(obj_value->address());
  }

  // Create object in new space.
  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);

  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
  obj->WriteToField(0, *heap_number);
  obj->WriteToField(1, *obj_value);

  {
    // Ensure the object is properly set up.
    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));

    field_index = FieldIndex::ForDescriptor(*map, 1);
    CHECK(field_index.is_inobject() && !field_index.is_double());
    CHECK(!map->IsUnboxedDoubleField(field_index));
  }
  CHECK(isolate->heap()->new_space()->Contains(*obj));

  // Heap is ready, force |ec_page| to become an evacuation candidate and
  // simulate incremental marking.
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap::SimulateIncrementalMarking(heap);
  // Disable stress compaction mode in order to let GC do scavenge.
  FLAG_stress_compaction = false;

  // Check that everything is ready for triggering incremental write barrier
  // during scavenge (i.e. that |obj| is black and incremental marking is
  // in compacting mode and |obj_value|'s page is an evacuation candidate).
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsCompacting());
  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // Trigger GCs so that |obj| moves to old gen.
  heap->CollectGarbage(i::NEW_SPACE);  // in survivor space now
  heap->CollectGarbage(i::NEW_SPACE);  // in old gen now

  CHECK(isolate->heap()->old_space()->Contains(*obj));
  CHECK(isolate->heap()->old_space()->Contains(*obj_value));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  heap->CollectGarbage(i::OLD_SPACE, "boom");

  // |obj_value| must be evacuated.
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // The tagged slot must have been updated to |obj_value|'s new location.
  FieldIndex field_index = FieldIndex::ForDescriptor(*map, 1);
  CHECK_EQ(*obj_value, obj->RawFastPropertyAt(field_index));
}
1173
1174
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001175static void TestLayoutDescriptorHelper(Isolate* isolate,
1176 int inobject_properties,
1177 Handle<DescriptorArray> descriptors,
1178 int number_of_descriptors) {
1179 Handle<Map> map = Map::Create(isolate, inobject_properties);
1180
1181 Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::New(
1182 map, descriptors, descriptors->number_of_descriptors());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001183 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001184
1185 LayoutDescriptorHelper helper(*map);
1186 bool all_fields_tagged = true;
1187
1188 int instance_size = map->instance_size();
1189
1190 int end_offset = instance_size * 2;
1191 int first_non_tagged_field_offset = end_offset;
1192 for (int i = 0; i < number_of_descriptors; i++) {
1193 PropertyDetails details = descriptors->GetDetails(i);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001194 if (details.type() != DATA) continue;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001195 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
1196 if (!index.is_inobject()) continue;
1197 all_fields_tagged &= !details.representation().IsDouble();
1198 bool expected_tagged = !index.is_double();
1199 if (!expected_tagged) {
1200 first_non_tagged_field_offset =
1201 Min(first_non_tagged_field_offset, index.offset());
1202 }
1203
1204 int end_of_region_offset;
1205 CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
1206 CHECK_EQ(expected_tagged, helper.IsTagged(index.offset(), instance_size,
1207 &end_of_region_offset));
1208 CHECK(end_of_region_offset > 0);
1209 CHECK(end_of_region_offset % kPointerSize == 0);
1210 CHECK(end_of_region_offset <= instance_size);
1211
1212 for (int offset = index.offset(); offset < end_of_region_offset;
1213 offset += kPointerSize) {
1214 CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
1215 }
1216 if (end_of_region_offset < instance_size) {
1217 CHECK_EQ(!expected_tagged, helper.IsTagged(end_of_region_offset));
1218 } else {
1219 CHECK_EQ(true, helper.IsTagged(end_of_region_offset));
1220 }
1221 }
1222
1223 for (int offset = 0; offset < JSObject::kHeaderSize; offset += kPointerSize) {
1224 // Header queries
1225 CHECK_EQ(true, helper.IsTagged(offset));
1226 int end_of_region_offset;
1227 CHECK_EQ(true, helper.IsTagged(offset, end_offset, &end_of_region_offset));
1228 CHECK_EQ(first_non_tagged_field_offset, end_of_region_offset);
1229
1230 // Out of bounds queries
1231 CHECK_EQ(true, helper.IsTagged(offset + instance_size));
1232 }
1233
1234 CHECK_EQ(all_fields_tagged, helper.all_fields_tagged());
1235}
1236
1237
1238TEST(LayoutDescriptorHelperMixed) {
1239 CcTest::InitializeVM();
1240 Isolate* isolate = CcTest::i_isolate();
1241 v8::HandleScope scope(CcTest::isolate());
1242
1243 Handle<LayoutDescriptor> layout_descriptor;
1244 const int kPropsCount = kSmiValueSize * 3;
1245 TestPropertyKind props[kPropsCount];
1246 for (int i = 0; i < kPropsCount; i++) {
1247 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
1248 }
1249 Handle<DescriptorArray> descriptors =
1250 CreateDescriptorArray(isolate, props, kPropsCount);
1251
1252 TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
1253
1254 TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
1255
1256 TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
1257
1258 TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
1259 kPropsCount);
1260
1261 TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
1262}
1263
1264
1265TEST(LayoutDescriptorHelperAllTagged) {
1266 CcTest::InitializeVM();
1267 Isolate* isolate = CcTest::i_isolate();
1268 v8::HandleScope scope(CcTest::isolate());
1269
1270 Handle<LayoutDescriptor> layout_descriptor;
1271 const int kPropsCount = kSmiValueSize * 3;
1272 TestPropertyKind props[kPropsCount];
1273 for (int i = 0; i < kPropsCount; i++) {
1274 props[i] = PROP_TAGGED;
1275 }
1276 Handle<DescriptorArray> descriptors =
1277 CreateDescriptorArray(isolate, props, kPropsCount);
1278
1279 TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
1280
1281 TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
1282
1283 TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
1284
1285 TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
1286 kPropsCount);
1287
1288 TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
1289}
1290
1291
1292TEST(LayoutDescriptorHelperAllDoubles) {
1293 CcTest::InitializeVM();
1294 Isolate* isolate = CcTest::i_isolate();
1295 v8::HandleScope scope(CcTest::isolate());
1296
1297 Handle<LayoutDescriptor> layout_descriptor;
1298 const int kPropsCount = kSmiValueSize * 3;
1299 TestPropertyKind props[kPropsCount];
1300 for (int i = 0; i < kPropsCount; i++) {
1301 props[i] = PROP_DOUBLE;
1302 }
1303 Handle<DescriptorArray> descriptors =
1304 CreateDescriptorArray(isolate, props, kPropsCount);
1305
1306 TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
1307
1308 TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
1309
1310 TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
1311
1312 TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
1313 kPropsCount);
1314
1315 TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
1316}
1317
1318
// Checks when layout descriptors are shared across a transition tree: a map
// created by adding another double field to |split_map| keeps sharing
// |split_map|'s slow layout descriptor, while adding a tagged field produces
// a distinct layout descriptor.
TEST(LayoutDescriptorSharing) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Handle<FieldType> any_type = FieldType::Any(isolate);

  // Build a map with 32 Smi fields followed by one double field; the double
  // pushes the layout descriptor into slow (heap-allocated) mode.
  Handle<Map> split_map;
  {
    Handle<Map> map = Map::Create(isolate, 64);
    for (int i = 0; i < 32; i++) {
      Handle<String> name = MakeName("prop", i);
      map = Map::CopyWithField(map, name, any_type, NONE, Representation::Smi(),
                               INSERT_TRANSITION).ToHandleChecked();
    }
    split_map = Map::CopyWithField(map, MakeString("dbl"), any_type, NONE,
                                   Representation::Double(),
                                   INSERT_TRANSITION).ToHandleChecked();
  }
  Handle<LayoutDescriptor> split_layout_descriptor(
      split_map->layout_descriptor(), isolate);
  CHECK(split_layout_descriptor->IsConsistentWithMap(*split_map, true));
  CHECK(split_layout_descriptor->IsSlowLayout());
  CHECK(split_map->owns_descriptors());

  // Adding a field transfers descriptor ownership to the new map but keeps
  // |split_map| pointing at the same layout descriptor.
  Handle<Map> map1 = Map::CopyWithField(split_map, MakeString("foo"), any_type,
                                        NONE, Representation::Double(),
                                        INSERT_TRANSITION).ToHandleChecked();
  CHECK(!split_map->owns_descriptors());
  CHECK_EQ(*split_layout_descriptor, split_map->layout_descriptor());

  // Layout descriptors should be shared with |split_map|.
  CHECK(map1->owns_descriptors());
  CHECK_EQ(*split_layout_descriptor, map1->layout_descriptor());
  CHECK(map1->layout_descriptor()->IsConsistentWithMap(*map1, true));

  Handle<Map> map2 = Map::CopyWithField(split_map, MakeString("bar"), any_type,
                                        NONE, Representation::Tagged(),
                                        INSERT_TRANSITION).ToHandleChecked();

  // Layout descriptors should not be shared with |split_map|.
  CHECK(map2->owns_descriptors());
  CHECK_NE(*split_layout_descriptor, map2->layout_descriptor());
  CHECK(map2->layout_descriptor()->IsConsistentWithMap(*map2, true));
}
1363
1364
// Checks that the old-to-new write barrier (store buffer) copes with a field
// layout change that turns a previously-recorded tagged slot into an unboxed
// double slot.
// |map| describes the object's initial layout and |new_map| its layout after
// migration; |tagged_descriptor| and |double_descriptor| are the descriptor
// indices of the tagged field (in |map|/|new_map|) and the double field (in
// |new_map|). When |check_tagged_value| is false the tagged field's value is
// not verified after GC, only the double field is.
static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                             int tagged_descriptor, int double_descriptor,
                             bool check_tagged_value = true) {
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj| by |map| in old space, create |obj_value| in
  // new space and ensure that write barrier is triggered when |obj_value| is
  // written to property |tagged_descriptor| of |obj|.
  // Then migrate object to |new_map| and set proper value for property
  // |double_descriptor|. Call GC and ensure that it did not crash during
  // store buffer entries updating.

  Handle<JSObject> obj;
  Handle<HeapObject> obj_value;
  {
    AlwaysAllocateScope always_allocate(isolate);
    obj = factory->NewJSObjectFromMap(map, TENURED);
    CHECK(old_space->Contains(*obj));

    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
  }

  CHECK(heap->InNewSpace(*obj_value));

  {
    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
    // NOTE(review): the repeat count looks chosen to stress store buffer
    // recording of the same slot; not otherwise documented — confirm.
    const int n = 153;
    for (int i = 0; i < n; i++) {
      obj->FastPropertyAtPut(index, *obj_value);
    }
  }

  // Migrate |obj| to |new_map| which should shift fields and put the
  // |boom_value| to the slot that was earlier recorded by write barrier.
  JSObject::MigrateToMap(obj, new_map);

  // A double whose bit pattern looks like a pointer just past |obj_value|.
  // If the GC still treated the migrated slot as tagged, it would chase this
  // bogus "pointer".
  Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
  double boom_value = bit_cast<double>(fake_object);

  FieldIndex double_field_index =
      FieldIndex::ForDescriptor(*new_map, double_descriptor);
  CHECK(obj->IsUnboxedDoubleField(double_field_index));
  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);

  // Trigger GC to evacuate all candidates.
  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");

  if (check_tagged_value) {
    FieldIndex tagged_field_index =
        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
  }
  // The double field must survive the GC bit-identically.
  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
}
1424
1425
// Same scenario as TestWriteBarrier, but for the incremental-marking
// (old-to-old) write barrier: both |obj| and |obj_value| live in old space,
// |obj_value|'s page is forced to be an evacuation candidate, and the slot
// recorded by the incremental barrier is turned into an unboxed double slot
// by migrating |obj| to |new_map| before the compacting GC runs.
// Parameters have the same meaning as in TestWriteBarrier.
static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                                        int tagged_descriptor,
                                        int double_descriptor,
                                        bool check_tagged_value = true) {
  if (FLAG_never_compact || !FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj| by |map| in old space, create |obj_value| in
  // old space and ensure it ends up in an evacuation candidate page. Start
  // incremental marking and ensure that incremental write barrier is triggered
  // when |obj_value| is written to property |tagged_descriptor| of |obj|.
  // Then migrate object to |new_map| and set proper value for property
  // |double_descriptor|. Call GC and ensure that it did not crash during
  // slots buffer entries updating.

  Handle<JSObject> obj;
  Handle<HeapObject> obj_value;
  Page* ec_page;
  {
    AlwaysAllocateScope always_allocate(isolate);
    obj = factory->NewJSObjectFromMap(map, TENURED);
    CHECK(old_space->Contains(*obj));

    // Make sure |obj_value| is placed on an old-space evacuation candidate.
    heap::SimulateFullSpace(old_space);
    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
    ec_page = Page::FromAddress(obj_value->address());
    CHECK_NE(ec_page, Page::FromAddress(obj->address()));
  }

  // Heap is ready, force |ec_page| to become an evacuation candidate and
  // simulate incremental marking.
  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  heap::SimulateIncrementalMarking(heap);

  // Check that everything is ready for triggering incremental write barrier
  // (i.e. that both |obj| and |obj_value| are black and the marking phase is
  // still active and |obj_value|'s page is indeed an evacuation candidate).
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsMarking());
  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // Trigger incremental write barrier, which should add a slot to remembered
  // set.
  {
    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
    obj->FastPropertyAtPut(index, *obj_value);
  }

  // Migrate |obj| to |new_map| which should shift fields and put the
  // |boom_value| to the slot that was earlier recorded by incremental write
  // barrier.
  JSObject::MigrateToMap(obj, new_map);

  // An arbitrary pointer-looking bit pattern stored as an unboxed double.
  double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));

  FieldIndex double_field_index =
      FieldIndex::ForDescriptor(*new_map, double_descriptor);
  CHECK(obj->IsUnboxedDoubleField(double_field_index));
  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);

  // Trigger GC to evacuate all candidates.
  CcTest::heap()->CollectGarbage(OLD_SPACE, "boom");

  // Ensure that the values are still there and correct.
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  if (check_tagged_value) {
    FieldIndex tagged_field_index =
        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
  }
  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
}
1506
// Selects which write-barrier flavor TestWriteBarrierObjectShiftFieldsRight
// exercises: OLD_TO_NEW_WRITE_BARRIER dispatches to TestWriteBarrier (store
// buffer path), OLD_TO_OLD_WRITE_BARRIER dispatches to
// TestIncrementalWriteBarrier (incremental marking path).
enum OldToWriteBarrierKind {
  OLD_TO_OLD_WRITE_BARRIER,
  OLD_TO_NEW_WRITE_BARRIER
};
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001511static void TestWriteBarrierObjectShiftFieldsRight(
Ben Murdochc5610432016-08-08 18:44:38 +01001512 OldToWriteBarrierKind write_barrier_kind) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001513 CcTest::InitializeVM();
1514 Isolate* isolate = CcTest::i_isolate();
1515 v8::HandleScope scope(CcTest::isolate());
1516
Ben Murdoch097c5b22016-05-18 11:27:45 +01001517 Handle<FieldType> any_type = FieldType::Any(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001518
1519 CompileRun("function func() { return 1; }");
1520
1521 Handle<JSObject> func = GetObject("func");
1522
1523 Handle<Map> map = Map::Create(isolate, 10);
1524 map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
1525 INSERT_TRANSITION).ToHandleChecked();
1526 map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
1527 Representation::Double(),
1528 INSERT_TRANSITION).ToHandleChecked();
1529 map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
1530 Representation::Tagged(),
1531 INSERT_TRANSITION).ToHandleChecked();
1532
1533 // Shift fields right by turning constant property to a field.
1534 Handle<Map> new_map = Map::ReconfigureProperty(
1535 map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
1536
1537 if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
1538 TestWriteBarrier(map, new_map, 2, 1);
1539 } else {
1540 CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
1541 TestIncrementalWriteBarrier(map, new_map, 2, 1);
1542 }
1543}
1544
// Old-to-new flavor: dispatches to TestWriteBarrier.
TEST(WriteBarrierObjectShiftFieldsRight) {
  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
}
1548
1549
// Old-to-old flavor: dispatches to TestIncrementalWriteBarrier.
TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
}
1553
1554
1555// TODO(ishell): add respective tests for property kind reconfiguring from
1556// accessor field to double, once accessor fields are supported by
1557// Map::ReconfigureProperty().
1558
1559
1560// TODO(ishell): add respective tests for fast property removal case once
1561// Map::ReconfigureProperty() supports that.
1562
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001563#endif