blob: 96fb9650e7e27e8bee05fefe7a0d252d81274c8a [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include <limits>
6
7#include "src/compiler/access-builder.h"
8#include "src/compiler/control-builders.h"
9#include "src/compiler/generic-node-inl.h"
10#include "src/compiler/graph-visualizer.h"
11#include "src/compiler/node-properties-inl.h"
12#include "src/compiler/pipeline.h"
13#include "src/compiler/representation-change.h"
14#include "src/compiler/simplified-lowering.h"
15#include "src/compiler/typer.h"
16#include "src/compiler/verifier.h"
17#include "src/execution.h"
18#include "src/parser.h"
19#include "src/rewriter.h"
20#include "src/scopes.h"
21#include "test/cctest/cctest.h"
22#include "test/cctest/compiler/codegen-tester.h"
23#include "test/cctest/compiler/graph-builder-tester.h"
24#include "test/cctest/compiler/value-helper.h"
25
26using namespace v8::internal;
27using namespace v8::internal::compiler;
28
// Test harness that builds a graph with GraphBuilderTester and then runs
// the simplified-lowering phase over it before code generation.  The
// parameter machine types describe up to five call parameters.
template <typename ReturnType>
class SimplifiedLoweringTester : public GraphBuilderTester<ReturnType> {
 public:
  SimplifiedLoweringTester(MachineType p0 = kMachNone,
                           MachineType p1 = kMachNone,
                           MachineType p2 = kMachNone,
                           MachineType p3 = kMachNone,
                           MachineType p4 = kMachNone)
      : GraphBuilderTester<ReturnType>(p0, p1, p2, p3, p4),
        typer(this->zone()),
        javascript(this->zone()),
        jsgraph(this->graph(), this->common(), &javascript, &typer,
                this->machine()),
        lowering(&jsgraph) {}

  Typer typer;
  JSOperatorBuilder javascript;
  JSGraph jsgraph;
  SimplifiedLowering lowering;

  // Finishes graph construction (adds the end node) and then lowers all
  // simplified operators in the graph to machine operators.
  void LowerAllNodes() {
    this->End();
    lowering.LowerAllNodes();
  }

  Factory* factory() { return this->isolate()->factory(); }
  Heap* heap() { return this->isolate()->heap(); }
};
57
58
59#ifndef V8_TARGET_ARCH_ARM64
60// TODO(titzer): these result in a stub call that doesn't work on ARM64.
61// TODO(titzer): factor these tests out to test-run-simplifiedops.cc.
62// TODO(titzer): test tagged representation for input to NumberToInt32.
63TEST(RunNumberToInt32_float64) {
64 // TODO(titzer): explicit load/stores here are only because of representations
65 double input;
66 int32_t result;
67 SimplifiedLoweringTester<Object*> t;
68 FieldAccess load = {kUntaggedBase, 0, Handle<Name>(), Type::Number(),
69 kMachFloat64};
70 Node* loaded = t.LoadField(load, t.PointerConstant(&input));
71 Node* convert = t.NumberToInt32(loaded);
72 FieldAccess store = {kUntaggedBase, 0, Handle<Name>(), Type::Signed32(),
73 kMachInt32};
74 t.StoreField(store, t.PointerConstant(&result), convert);
75 t.Return(t.jsgraph.TrueConstant());
76 t.LowerAllNodes();
77 t.GenerateCode();
78
79 if (Pipeline::SupportedTarget()) {
80 FOR_FLOAT64_INPUTS(i) {
81 input = *i;
82 int32_t expected = DoubleToInt32(*i);
83 t.Call();
84 CHECK_EQ(expected, result);
85 }
86 }
87}
88
89
90// TODO(titzer): test tagged representation for input to NumberToUint32.
91TEST(RunNumberToUint32_float64) {
92 // TODO(titzer): explicit load/stores here are only because of representations
93 double input;
94 uint32_t result;
95 SimplifiedLoweringTester<Object*> t;
96 FieldAccess load = {kUntaggedBase, 0, Handle<Name>(), Type::Number(),
97 kMachFloat64};
98 Node* loaded = t.LoadField(load, t.PointerConstant(&input));
99 Node* convert = t.NumberToUint32(loaded);
100 FieldAccess store = {kUntaggedBase, 0, Handle<Name>(), Type::Unsigned32(),
101 kMachUint32};
102 t.StoreField(store, t.PointerConstant(&result), convert);
103 t.Return(t.jsgraph.TrueConstant());
104 t.LowerAllNodes();
105 t.GenerateCode();
106
107 if (Pipeline::SupportedTarget()) {
108 FOR_FLOAT64_INPUTS(i) {
109 input = *i;
110 uint32_t expected = DoubleToUint32(*i);
111 t.Call();
112 CHECK_EQ(static_cast<int32_t>(expected), static_cast<int32_t>(result));
113 }
114 }
115}
116#endif
117
118
119// Create a simple JSObject with a unique map.
120static Handle<JSObject> TestObject() {
121 static int index = 0;
122 char buffer[50];
123 v8::base::OS::SNPrintF(buffer, 50, "({'a_%d':1})", index++);
124 return Handle<JSObject>::cast(v8::Utils::OpenHandle(*CompileRun(buffer)));
125}
126
127
128TEST(RunLoadMap) {
129 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
130 FieldAccess access = AccessBuilder::ForMap();
131 Node* load = t.LoadField(access, t.Parameter(0));
132 t.Return(load);
133
134 t.LowerAllNodes();
135 t.GenerateCode();
136
137 if (Pipeline::SupportedTarget()) {
138 Handle<JSObject> src = TestObject();
139 Handle<Map> src_map(src->map());
140 Object* result = t.Call(*src); // TODO(titzer): raw pointers in call
141 CHECK_EQ(*src_map, result);
142 }
143}
144
145
146TEST(RunStoreMap) {
147 SimplifiedLoweringTester<int32_t> t(kMachAnyTagged, kMachAnyTagged);
148 FieldAccess access = AccessBuilder::ForMap();
149 t.StoreField(access, t.Parameter(1), t.Parameter(0));
150 t.Return(t.jsgraph.TrueConstant());
151
152 t.LowerAllNodes();
153 t.GenerateCode();
154
155 if (Pipeline::SupportedTarget()) {
156 Handle<JSObject> src = TestObject();
157 Handle<Map> src_map(src->map());
158 Handle<JSObject> dst = TestObject();
159 CHECK(src->map() != dst->map());
160 t.Call(*src_map, *dst); // TODO(titzer): raw pointers in call
161 CHECK(*src_map == dst->map());
162 }
163}
164
165
166TEST(RunLoadProperties) {
167 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
168 FieldAccess access = AccessBuilder::ForJSObjectProperties();
169 Node* load = t.LoadField(access, t.Parameter(0));
170 t.Return(load);
171
172 t.LowerAllNodes();
173 t.GenerateCode();
174
175 if (Pipeline::SupportedTarget()) {
176 Handle<JSObject> src = TestObject();
177 Handle<FixedArray> src_props(src->properties());
178 Object* result = t.Call(*src); // TODO(titzer): raw pointers in call
179 CHECK_EQ(*src_props, result);
180 }
181}
182
183
184TEST(RunLoadStoreMap) {
185 SimplifiedLoweringTester<Object*> t(kMachAnyTagged, kMachAnyTagged);
186 FieldAccess access = AccessBuilder::ForMap();
187 Node* load = t.LoadField(access, t.Parameter(0));
188 t.StoreField(access, t.Parameter(1), load);
189 t.Return(load);
190
191 t.LowerAllNodes();
192 t.GenerateCode();
193
194 if (Pipeline::SupportedTarget()) {
195 Handle<JSObject> src = TestObject();
196 Handle<Map> src_map(src->map());
197 Handle<JSObject> dst = TestObject();
198 CHECK(src->map() != dst->map());
199 Object* result = t.Call(*src, *dst); // TODO(titzer): raw pointers in call
200 CHECK(result->IsMap());
201 CHECK_EQ(*src_map, result);
202 CHECK(*src_map == dst->map());
203 }
204}
205
206
// Loads element 0 of a FixedArray, stores it into element 1, and returns
// the loaded value; verifies both slots then alias element 0.
TEST(RunLoadStoreFixedArrayIndex) {
  SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
  ElementAccess access = AccessBuilder::ForFixedArrayElement();
  // Load array[0] with a bounds limit of 2 ...
  Node* load = t.LoadElement(access, t.Parameter(0), t.Int32Constant(0),
                             t.Int32Constant(2));
  // ... and store the loaded value into array[1].
  t.StoreElement(access, t.Parameter(0), t.Int32Constant(1), t.Int32Constant(2),
                 load);
  t.Return(load);

  t.LowerAllNodes();
  t.GenerateCode();

  if (Pipeline::SupportedTarget()) {
    Handle<FixedArray> array = t.factory()->NewFixedArray(2);
    Handle<JSObject> src = TestObject();
    Handle<JSObject> dst = TestObject();
    array->set(0, *src);
    array->set(1, *dst);
    Object* result = t.Call(*array);
    CHECK_EQ(*src, result);
    CHECK_EQ(*src, array->get(0));
    CHECK_EQ(*src, array->get(1));  // element 1 was overwritten with src.
  }
}
231
232
// Loads a byte out of a JSArrayBuffer's backing store and stores it back
// at the next index; verifies the raw buffer contents afterwards.
TEST(RunLoadStoreArrayBuffer) {
  SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
  const int index = 12;
  const int array_length = 2 * index;
  ElementAccess buffer_access =
      AccessBuilder::ForBackingStoreElement(kMachInt8);
  // First load the raw backing store pointer from the buffer parameter.
  Node* backing_store = t.LoadField(
      AccessBuilder::ForJSArrayBufferBackingStore(), t.Parameter(0));
  Node* load =
      t.LoadElement(buffer_access, backing_store, t.Int32Constant(index),
                    t.Int32Constant(array_length));
  t.StoreElement(buffer_access, backing_store, t.Int32Constant(index + 1),
                 t.Int32Constant(array_length), load);
  t.Return(t.jsgraph.TrueConstant());

  t.LowerAllNodes();
  t.GenerateCode();

  if (Pipeline::SupportedTarget()) {
    Handle<JSArrayBuffer> array = t.factory()->NewJSArrayBuffer();
    Runtime::SetupArrayBufferAllocatingData(t.isolate(), array, array_length);
    uint8_t* data = reinterpret_cast<uint8_t*>(array->backing_store());
    for (int i = 0; i < array_length; i++) {
      data[i] = i;
    }

    // TODO(titzer): raw pointers in call
    Object* result = t.Call(*array);
    CHECK_EQ(t.isolate()->heap()->true_value(), result);
    for (int i = 0; i < array_length; i++) {
      // Only data[index + 1] should have changed (copied from data[index]).
      uint8_t expected = i;
      if (i == (index + 1)) expected = index;
      CHECK_EQ(data[i], expected);
    }
  }
}
269
270
// Loads a tagged (Smi) field from a raw (untagged) C array at each word
// offset and checks the loaded value matches what was planted there.
TEST(RunLoadFieldFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3)};

  for (size_t i = 0; i < arraysize(smis); i++) {
    // Field offset is i words into the array.
    int offset = static_cast<int>(i * sizeof(Smi*));
    FieldAccess access = {kUntaggedBase, offset, Handle<Name>(),
                          Type::Integral32(), kMachAnyTagged};

    SimplifiedLoweringTester<Object*> t;
    Node* load = t.LoadField(access, t.PointerConstant(smis));
    t.Return(load);
    t.LowerAllNodes();

    if (!Pipeline::SupportedTarget()) continue;

    for (int j = -5; j <= 5; j++) {
      Smi* expected = Smi::FromInt(j);
      smis[i] = expected;  // the generated code should read this slot.
      CHECK_EQ(expected, t.Call());
    }
  }
}
293
294
// Stores a tagged (Smi) value into a raw (untagged) C array at each word
// offset and checks the memory slot was actually written.
TEST(RunStoreFieldToUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3)};

  for (size_t i = 0; i < arraysize(smis); i++) {
    int offset = static_cast<int>(i * sizeof(Smi*));
    FieldAccess access = {kUntaggedBase, offset, Handle<Name>(),
                          Type::Integral32(), kMachAnyTagged};

    SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
    Node* p0 = t.Parameter(0);
    t.StoreField(access, t.PointerConstant(smis), p0);
    t.Return(p0);
    t.LowerAllNodes();

    if (!Pipeline::SupportedTarget()) continue;

    for (int j = -5; j <= 5; j++) {
      Smi* expected = Smi::FromInt(j);
      smis[i] = Smi::FromInt(-100);  // poison the slot before the call.
      CHECK_EQ(expected, t.Call(expected));
      CHECK_EQ(expected, smis[i]);  // the store must have hit the slot.
    }
  }
}
319
320
// Loads tagged (Smi) elements from a raw (untagged) C array, varying both
// the header offset and the element index.
TEST(RunLoadElementFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3),
                 Smi::FromInt(4), Smi::FromInt(5)};

  for (size_t i = 0; i < arraysize(smis); i++) {    // for header sizes
    for (size_t j = 0; (i + j) < arraysize(smis); j++) {  // for element index
      int offset = static_cast<int>(i * sizeof(Smi*));
      ElementAccess access = {kUntaggedBase, offset, Type::Integral32(),
                              kMachAnyTagged};

      SimplifiedLoweringTester<Object*> t;
      Node* load = t.LoadElement(
          access, t.PointerConstant(smis), t.Int32Constant(static_cast<int>(j)),
          t.Int32Constant(static_cast<int>(arraysize(smis))));
      t.Return(load);
      t.LowerAllNodes();

      if (!Pipeline::SupportedTarget()) continue;

      for (int k = -5; k <= 5; k++) {
        Smi* expected = Smi::FromInt(k);
        smis[i + j] = expected;  // slot i+j = header offset i + index j.
        CHECK_EQ(expected, t.Call());
      }
    }
  }
}
348
349
// Stores tagged (Smi) elements into a raw (untagged) C array, varying both
// the header offset and the element index, and checks the write landed.
TEST(RunStoreElementFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3),
                 Smi::FromInt(4), Smi::FromInt(5)};

  for (size_t i = 0; i < arraysize(smis); i++) {    // for header sizes
    for (size_t j = 0; (i + j) < arraysize(smis); j++) {  // for element index
      int offset = static_cast<int>(i * sizeof(Smi*));
      ElementAccess access = {kUntaggedBase, offset, Type::Integral32(),
                              kMachAnyTagged};

      SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
      Node* p0 = t.Parameter(0);
      t.StoreElement(access, t.PointerConstant(smis),
                     t.Int32Constant(static_cast<int>(j)),
                     t.Int32Constant(static_cast<int>(arraysize(smis))), p0);
      t.Return(p0);
      t.LowerAllNodes();

      if (!Pipeline::SupportedTarget()) continue;

      for (int k = -5; k <= 5; k++) {
        Smi* expected = Smi::FromInt(k);
        smis[i + j] = Smi::FromInt(-100);  // poison the slot before the call.
        CHECK_EQ(expected, t.Call(expected));
        CHECK_EQ(expected, smis[i + j]);
      }

      // TODO(titzer): assert the contents of the array.
    }
  }
}
381
382
// A helper class for accessing fields and elements of various types, on both
// tagged and untagged base pointers. Contains both tagged and untagged buffers
// for testing direct memory access from generated code.
template <typename E>
class AccessTester : public HandleAndZoneScope {
 public:
  bool tagged;           // whether accesses go through the tagged buffer.
  MachineType rep;       // machine representation of the elements.
  E* original_elements;  // pristine data used to (re)initialize both buffers.
  size_t num_elements;
  E* untagged_array;     // malloc'ed off-heap buffer; freed in the dtor.
  Handle<ByteArray> tagged_array;  // TODO(titzer): use FixedArray for tagged.

  AccessTester(bool t, MachineType r, E* orig, size_t num)
      : tagged(t),
        rep(r),
        original_elements(orig),
        num_elements(num),
        untagged_array(static_cast<E*>(malloc(ByteSize()))),
        tagged_array(main_isolate()->factory()->NewByteArray(
            static_cast<int>(ByteSize()))) {
    Reinitialize();
  }

  ~AccessTester() { free(untagged_array); }

  size_t ByteSize() { return num_elements * sizeof(E); }

  // Nuke both {untagged_array} and {tagged_array} with {original_elements}.
  void Reinitialize() {
    memcpy(untagged_array, original_elements, ByteSize());
    CHECK_EQ(static_cast<int>(ByteSize()), tagged_array->length());
    E* raw = reinterpret_cast<E*>(tagged_array->GetDataStartAddress());
    memcpy(raw, original_elements, ByteSize());
  }

  // Create and run code that copies the element in either {untagged_array}
  // or {tagged_array} at index {from_index} to index {to_index}.
  void RunCopyElement(int from_index, int to_index) {
    // TODO(titzer): test element and field accesses where the base is not
    // a constant in the code.
    BoundsCheck(from_index);
    BoundsCheck(to_index);
    ElementAccess access = GetElementAccess();

    SimplifiedLoweringTester<Object*> t;
    Node* ptr = GetBaseNode(&t);
    Node* load = t.LoadElement(access, ptr, t.Int32Constant(from_index),
                               t.Int32Constant(static_cast<int>(num_elements)));
    t.StoreElement(access, ptr, t.Int32Constant(to_index),
                   t.Int32Constant(static_cast<int>(num_elements)), load);
    t.Return(t.jsgraph.TrueConstant());
    t.LowerAllNodes();
    t.GenerateCode();

    if (Pipeline::SupportedTarget()) {
      Object* result = t.Call();
      CHECK_EQ(t.isolate()->heap()->true_value(), result);
    }
  }

  // Create and run code that copies the field in either {untagged_array}
  // or {tagged_array} at index {from_index} to index {to_index}.
  void RunCopyField(int from_index, int to_index) {
    BoundsCheck(from_index);
    BoundsCheck(to_index);
    FieldAccess from_access = GetFieldAccess(from_index);
    FieldAccess to_access = GetFieldAccess(to_index);

    SimplifiedLoweringTester<Object*> t;
    Node* ptr = GetBaseNode(&t);
    Node* load = t.LoadField(from_access, ptr);
    t.StoreField(to_access, ptr, load);
    t.Return(t.jsgraph.TrueConstant());
    t.LowerAllNodes();
    t.GenerateCode();

    if (Pipeline::SupportedTarget()) {
      Object* result = t.Call();
      CHECK_EQ(t.isolate()->heap()->true_value(), result);
    }
  }

  // Create and run code that copies the elements from {this} to {that}.
  // Currently disabled; see the TODO below.
  void RunCopyElements(AccessTester<E>* that) {
// TODO(titzer): Rewrite this test without StructuredGraphBuilder support.
#if 0
    SimplifiedLoweringTester<Object*> t;

    Node* one = t.Int32Constant(1);
    Node* index = t.Int32Constant(0);
    Node* limit = t.Int32Constant(static_cast<int>(num_elements));
    t.environment()->Push(index);
    Node* src = this->GetBaseNode(&t);
    Node* dst = that->GetBaseNode(&t);
    {
      LoopBuilder loop(&t);
      loop.BeginLoop();
      // Loop exit condition
      index = t.environment()->Top();
      Node* condition = t.Int32LessThan(index, limit);
      loop.BreakUnless(condition);
      // dst[index] = src[index]
      index = t.environment()->Pop();
      Node* load = t.LoadElement(this->GetElementAccess(), src, index);
      t.StoreElement(that->GetElementAccess(), dst, index, load);
      // index++
      index = t.Int32Add(index, one);
      t.environment()->Push(index);
      // continue
      loop.EndBody();
      loop.EndLoop();
    }
    index = t.environment()->Pop();
    t.Return(t.jsgraph.TrueConstant());
    t.LowerAllNodes();
    t.GenerateCode();

    if (Pipeline::SupportedTarget()) {
      Object* result = t.Call();
      CHECK_EQ(t.isolate()->heap()->true_value(), result);
    }
#endif
  }

  // Reads element {index} directly from whichever buffer is in use.
  E GetElement(int index) {
    BoundsCheck(index);
    if (tagged) {
      E* raw = reinterpret_cast<E*>(tagged_array->GetDataStartAddress());
      return raw[index];
    } else {
      return untagged_array[index];
    }
  }

 private:
  ElementAccess GetElementAccess() {
    ElementAccess access = {tagged ? kTaggedBase : kUntaggedBase,
                            tagged ? FixedArrayBase::kHeaderSize : 0,
                            Type::Any(), rep};
    return access;
  }

  FieldAccess GetFieldAccess(int field) {
    int offset = field * sizeof(E);
    FieldAccess access = {tagged ? kTaggedBase : kUntaggedBase,
                          offset + (tagged ? FixedArrayBase::kHeaderSize : 0),
                          Handle<Name>(), Type::Any(), rep};
    return access;
  }

  // Returns a constant node for the base of whichever buffer is in use.
  template <typename T>
  Node* GetBaseNode(SimplifiedLoweringTester<T>* t) {
    return tagged ? t->HeapConstant(tagged_array)
                  : t->PointerConstant(untagged_array);
  }

  void BoundsCheck(int index) {
    CHECK_GE(index, 0);
    CHECK_LT(index, static_cast<int>(num_elements));
    CHECK_EQ(static_cast<int>(ByteSize()), tagged_array->length());
  }
};
546
547
// Exercises field and element copies at every index, for both tagged and
// untagged bases, verifying the buffer contents after each copy; then runs
// the (currently disabled) whole-array copy for all base combinations.
template <typename E>
static void RunAccessTest(MachineType rep, E* original_elements, size_t num) {
  int num_elements = static_cast<int>(num);

  for (int taggedness = 0; taggedness < 2; taggedness++) {
    AccessTester<E> a(taggedness == 1, rep, original_elements, num);
    for (int field = 0; field < 2; field++) {
      for (int i = 0; i < num_elements - 1; i++) {
        a.Reinitialize();
        if (field == 0) {
          a.RunCopyField(i, i + 1);  // Test field read/write.
        } else {
          a.RunCopyElement(i, i + 1);  // Test element read/write.
        }
        if (Pipeline::SupportedTarget()) {  // verify.
          for (int j = 0; j < num_elements; j++) {
            // Slot i+1 should have been overwritten with slot i's value.
            E expect =
                j == (i + 1) ? original_elements[i] : original_elements[j];
            CHECK_EQ(expect, a.GetElement(j));
          }
        }
      }
    }
  }
  // Test array copy.
  for (int tf = 0; tf < 2; tf++) {
    for (int tt = 0; tt < 2; tt++) {
      AccessTester<E> a(tf == 1, rep, original_elements, num);
      AccessTester<E> b(tt == 1, rep, original_elements, num);
      a.RunCopyElements(&b);
      if (Pipeline::SupportedTarget()) {  // verify.
        for (int i = 0; i < num_elements; i++) {
          CHECK_EQ(a.GetElement(i), b.GetElement(i));
        }
      }
    }
  }
}
586
587
// 8-bit element/field accesses.
TEST(RunAccessTests_uint8) {
  uint8_t data[] = {0x07, 0x16, 0x25, 0x34, 0x43, 0x99,
                    0xab, 0x78, 0x89, 0x19, 0x2b, 0x38};
  RunAccessTest<uint8_t>(kMachInt8, data, arraysize(data));
}
593
594
// 16-bit element/field accesses.
TEST(RunAccessTests_uint16) {
  uint16_t data[] = {0x071a, 0x162b, 0x253c, 0x344d, 0x435e, 0x7777};
  RunAccessTest<uint16_t>(kMachInt16, data, arraysize(data));
}
599
600
// 32-bit element/field accesses, including extreme values.
TEST(RunAccessTests_int32) {
  int32_t data[] = {-211, 211, 628347, 2000000000, -2000000000, -1, -100000034};
  RunAccessTest<int32_t>(kMachInt32, data, arraysize(data));
}
605
606
// Builds a 64-bit constant from a high 32-bit part and a low hex literal.
#define V8_2PART_INT64(a, b) (((static_cast<int64_t>(a) << 32) + 0x##b##u))
608
609
// 64-bit element/field accesses; only meaningful on 64-bit targets.
TEST(RunAccessTests_int64) {
  if (kPointerSize != 8) return;  // skip on 32-bit targets.
  int64_t data[] = {V8_2PART_INT64(0x10111213, 14151617),
                    V8_2PART_INT64(0x20212223, 24252627),
                    V8_2PART_INT64(0x30313233, 34353637),
                    V8_2PART_INT64(0xa0a1a2a3, a4a5a6a7),
                    V8_2PART_INT64(0xf0f1f2f3, f4f5f6f7)};
  RunAccessTest<int64_t>(kMachInt64, data, arraysize(data));
}
619
620
// Double-precision element/field accesses.
TEST(RunAccessTests_float64) {
  double data[] = {1.25, -1.25, 2.75, 11.0, 11100.8};
  RunAccessTest<double>(kMachFloat64, data, arraysize(data));
}
625
626
// Tagged (Smi) element/field accesses.
TEST(RunAccessTests_Smi) {
  Smi* data[] = {Smi::FromInt(-1), Smi::FromInt(-9),
                 Smi::FromInt(0), Smi::FromInt(666),
                 Smi::FromInt(77777), Smi::FromInt(Smi::kMaxValue)};
  RunAccessTest<Smi*>(kMachAnyTagged, data, arraysize(data));
}
633
634
// Fills in most of the nodes of the graph in order to make tests shorter.
// Builds a graph with start/end/return and up to three typed parameters,
// plus helpers to attach uses, lower the graph, and check results.
class TestingGraph : public HandleAndZoneScope, public GraphAndBuilders {
 public:
  Typer typer;
  JSOperatorBuilder javascript;
  JSGraph jsgraph;
  Node* p0;
  Node* p1;
  Node* p2;
  Node* start;
  Node* end;
  Node* ret;

  explicit TestingGraph(Type* p0_type, Type* p1_type = Type::None(),
                        Type* p2_type = Type::None())
      : GraphAndBuilders(main_zone()),
        typer(main_zone()),
        javascript(main_zone()),
        jsgraph(graph(), common(), &javascript, &typer, machine()) {
    start = graph()->NewNode(common()->Start(2));
    graph()->SetStart(start);
    ret =
        graph()->NewNode(common()->Return(), jsgraph.Constant(0), start, start);
    end = graph()->NewNode(common()->End(), ret);
    graph()->SetEnd(end);
    p0 = graph()->NewNode(common()->Parameter(0), start);
    p1 = graph()->NewNode(common()->Parameter(1), start);
    p2 = graph()->NewNode(common()->Parameter(2), start);
    NodeProperties::SetBounds(p0, Bounds(p0_type));
    NodeProperties::SetBounds(p1, Bounds(p1_type));
    NodeProperties::SetBounds(p2, Bounds(p2_type));
  }

  // Builds op(p0, p1), lowers, and checks the node became {expected}.
  void CheckLoweringBinop(IrOpcode::Value expected, const Operator* op) {
    Node* node = Return(graph()->NewNode(op, p0, p1));
    Lower();
    CHECK_EQ(expected, node->opcode());
  }

  // Like CheckLoweringBinop, but the binop's only use is a truncation.
  void CheckLoweringTruncatedBinop(IrOpcode::Value expected, const Operator* op,
                                   const Operator* trunc) {
    Node* node = graph()->NewNode(op, p0, p1);
    Return(graph()->NewNode(trunc, node));
    Lower();
    CHECK_EQ(expected, node->opcode());
  }

  void Lower() {
    SimplifiedLowering lowering(&jsgraph);
    lowering.LowerAllNodes();
  }

  // Inserts the node as the return value of the graph.
  Node* Return(Node* node) {
    ret->ReplaceInput(0, node);
    return node;
  }

  // Inserts the node as the effect input to the return of the graph.
  void Effect(Node* node) { ret->ReplaceInput(1, node); }

  // Produces a node whose output has the requested machine representation.
  Node* ExampleWithOutput(MachineType type) {
    // TODO(titzer): use parameters with guaranteed representations.
    if (type & kTypeInt32) {
      return graph()->NewNode(machine()->Int32Add(), jsgraph.Int32Constant(1),
                              jsgraph.Int32Constant(1));
    } else if (type & kTypeUint32) {
      return graph()->NewNode(machine()->Word32Shr(), jsgraph.Int32Constant(1),
                              jsgraph.Int32Constant(1));
    } else if (type & kRepFloat64) {
      return graph()->NewNode(machine()->Float64Add(),
                              jsgraph.Float64Constant(1),
                              jsgraph.Float64Constant(1));
    } else if (type & kRepBit) {
      return graph()->NewNode(machine()->Word32Equal(),
                              jsgraph.Int32Constant(1),
                              jsgraph.Int32Constant(1));
    } else if (type & kRepWord64) {
      return graph()->NewNode(machine()->Int64Add(), Int64Constant(1),
                              Int64Constant(1));
    } else {
      CHECK(type & kRepTagged);
      return p0;
    }
  }

  // Produces a node that uses {node} with the requested representation.
  Node* Use(Node* node, MachineType type) {
    if (type & kTypeInt32) {
      return graph()->NewNode(machine()->Int32LessThan(), node,
                              jsgraph.Int32Constant(1));
    } else if (type & kTypeUint32) {
      return graph()->NewNode(machine()->Uint32LessThan(), node,
                              jsgraph.Int32Constant(1));
    } else if (type & kRepFloat64) {
      return graph()->NewNode(machine()->Float64Add(), node,
                              jsgraph.Float64Constant(1));
    } else if (type & kRepWord64) {
      return graph()->NewNode(machine()->Int64LessThan(), node,
                              Int64Constant(1));
    } else {
      return graph()->NewNode(simplified()->ReferenceEqual(Type::Any()), node,
                              jsgraph.TrueConstant());
    }
  }

  // Branches on {cond}, redirecting the return's control through the merge.
  Node* Branch(Node* cond) {
    Node* br = graph()->NewNode(common()->Branch(), cond, start);
    Node* tb = graph()->NewNode(common()->IfTrue(), br);
    Node* fb = graph()->NewNode(common()->IfFalse(), br);
    Node* m = graph()->NewNode(common()->Merge(2), tb, fb);
    NodeProperties::ReplaceControlInput(ret, m);
    return br;
  }

  Node* Int64Constant(int64_t v) {
    return graph()->NewNode(common()->Int64Constant(v));
  }

  SimplifiedOperatorBuilder* simplified() { return &main_simplified_; }
  MachineOperatorBuilder* machine() { return &main_machine_; }
  CommonOperatorBuilder* common() { return &main_common_; }
  Graph* graph() { return main_graph_; }
};
758
759
760TEST(LowerBooleanNot_bit_bit) {
761 // BooleanNot(x: kRepBit) used as kRepBit
762 TestingGraph t(Type::Boolean());
763 Node* b = t.ExampleWithOutput(kRepBit);
764 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
765 Node* use = t.Branch(inv);
766 t.Lower();
767 Node* cmp = use->InputAt(0);
768 CHECK_EQ(t.machine()->WordEqual()->opcode(), cmp->opcode());
769 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
770 Node* f = t.jsgraph.Int32Constant(0);
771 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
772}
773
774
775TEST(LowerBooleanNot_bit_tagged) {
776 // BooleanNot(x: kRepBit) used as kRepTagged
777 TestingGraph t(Type::Boolean());
778 Node* b = t.ExampleWithOutput(kRepBit);
779 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
780 Node* use = t.Use(inv, kRepTagged);
781 t.Return(use);
782 t.Lower();
783 CHECK_EQ(IrOpcode::kChangeBitToBool, use->InputAt(0)->opcode());
784 Node* cmp = use->InputAt(0)->InputAt(0);
785 CHECK_EQ(t.machine()->WordEqual()->opcode(), cmp->opcode());
786 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
787 Node* f = t.jsgraph.Int32Constant(0);
788 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
789}
790
791
792TEST(LowerBooleanNot_tagged_bit) {
793 // BooleanNot(x: kRepTagged) used as kRepBit
794 TestingGraph t(Type::Boolean());
795 Node* b = t.p0;
796 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
797 Node* use = t.Branch(inv);
798 t.Lower();
799 Node* cmp = use->InputAt(0);
800 CHECK_EQ(t.machine()->WordEqual()->opcode(), cmp->opcode());
801 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
802 Node* f = t.jsgraph.FalseConstant();
803 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
804}
805
806
807TEST(LowerBooleanNot_tagged_tagged) {
808 // BooleanNot(x: kRepTagged) used as kRepTagged
809 TestingGraph t(Type::Boolean());
810 Node* b = t.p0;
811 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
812 Node* use = t.Use(inv, kRepTagged);
813 t.Return(use);
814 t.Lower();
815 CHECK_EQ(IrOpcode::kChangeBitToBool, use->InputAt(0)->opcode());
816 Node* cmp = use->InputAt(0)->InputAt(0);
817 CHECK_EQ(t.machine()->WordEqual()->opcode(), cmp->opcode());
818 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
819 Node* f = t.jsgraph.FalseConstant();
820 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
821}
822
823
824TEST(LowerBooleanToNumber_bit_int32) {
825 // BooleanToNumber(x: kRepBit) used as kMachInt32
826 TestingGraph t(Type::Boolean());
827 Node* b = t.ExampleWithOutput(kRepBit);
828 Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
829 Node* use = t.Use(cnv, kMachInt32);
830 t.Return(use);
831 t.Lower();
832 CHECK_EQ(b, use->InputAt(0));
833}
834
835
836TEST(LowerBooleanToNumber_tagged_int32) {
837 // BooleanToNumber(x: kRepTagged) used as kMachInt32
838 TestingGraph t(Type::Boolean());
839 Node* b = t.p0;
840 Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
841 Node* use = t.Use(cnv, kMachInt32);
842 t.Return(use);
843 t.Lower();
844 CHECK_EQ(t.machine()->WordEqual()->opcode(), cnv->opcode());
845 CHECK(b == cnv->InputAt(0) || b == cnv->InputAt(1));
846 Node* c = t.jsgraph.TrueConstant();
847 CHECK(c == cnv->InputAt(0) || c == cnv->InputAt(1));
848}
849
850
851TEST(LowerBooleanToNumber_bit_tagged) {
852 // BooleanToNumber(x: kRepBit) used as kMachAnyTagged
853 TestingGraph t(Type::Boolean());
854 Node* b = t.ExampleWithOutput(kRepBit);
855 Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
856 Node* use = t.Use(cnv, kMachAnyTagged);
857 t.Return(use);
858 t.Lower();
859 CHECK_EQ(b, use->InputAt(0)->InputAt(0));
860 CHECK_EQ(IrOpcode::kChangeInt32ToTagged, use->InputAt(0)->opcode());
861}
862
863
864TEST(LowerBooleanToNumber_tagged_tagged) {
865 // BooleanToNumber(x: kRepTagged) used as kMachAnyTagged
866 TestingGraph t(Type::Boolean());
867 Node* b = t.p0;
868 Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
869 Node* use = t.Use(cnv, kMachAnyTagged);
870 t.Return(use);
871 t.Lower();
872 CHECK_EQ(cnv, use->InputAt(0)->InputAt(0));
873 CHECK_EQ(IrOpcode::kChangeInt32ToTagged, use->InputAt(0)->opcode());
874 CHECK_EQ(t.machine()->WordEqual()->opcode(), cnv->opcode());
875 CHECK(b == cnv->InputAt(0) || b == cnv->InputAt(1));
876 Node* c = t.jsgraph.TrueConstant();
877 CHECK(c == cnv->InputAt(0) || c == cnv->InputAt(1));
878}
879
880
// Representative input types used by the generic lowering tests below.
static Type* test_types[] = {Type::Signed32(), Type::Unsigned32(),
                             Type::Number(), Type::Any()};
883
884
// Number comparisons on two Signed32 inputs lower to int32 comparisons.
TEST(LowerNumberCmp_to_int32) {
  TestingGraph t(Type::Signed32(), Type::Signed32());

  t.CheckLoweringBinop(IrOpcode::kWord32Equal, t.simplified()->NumberEqual());
  t.CheckLoweringBinop(IrOpcode::kInt32LessThan,
                       t.simplified()->NumberLessThan());
  t.CheckLoweringBinop(IrOpcode::kInt32LessThanOrEqual,
                       t.simplified()->NumberLessThanOrEqual());
}
894
895
// Number comparisons on two Unsigned32 inputs lower to uint32 comparisons.
TEST(LowerNumberCmp_to_uint32) {
  TestingGraph t(Type::Unsigned32(), Type::Unsigned32());

  t.CheckLoweringBinop(IrOpcode::kWord32Equal, t.simplified()->NumberEqual());
  t.CheckLoweringBinop(IrOpcode::kUint32LessThan,
                       t.simplified()->NumberLessThan());
  t.CheckLoweringBinop(IrOpcode::kUint32LessThanOrEqual,
                       t.simplified()->NumberLessThanOrEqual());
}
905
906
// Number comparisons on general Number/Any inputs lower to float64
// comparisons.
TEST(LowerNumberCmp_to_float64) {
  static Type* types[] = {Type::Number(), Type::Any()};

  for (size_t i = 0; i < arraysize(types); i++) {
    TestingGraph t(types[i], types[i]);

    t.CheckLoweringBinop(IrOpcode::kFloat64Equal,
                         t.simplified()->NumberEqual());
    t.CheckLoweringBinop(IrOpcode::kFloat64LessThan,
                         t.simplified()->NumberLessThan());
    t.CheckLoweringBinop(IrOpcode::kFloat64LessThanOrEqual,
                         t.simplified()->NumberLessThanOrEqual());
  }
}
921
922
// NumberAdd/Subtract on Signed32 inputs, with a truncating use, lower to
// int32 arithmetic.
TEST(LowerNumberAddSub_to_int32) {
  TestingGraph t(Type::Signed32(), Type::Signed32());
  t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Add,
                                t.simplified()->NumberAdd(),
                                t.simplified()->NumberToInt32());
  t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Sub,
                                t.simplified()->NumberSubtract(),
                                t.simplified()->NumberToInt32());
}
932
933
// NumberAdd/Subtract on Unsigned32 inputs, with a truncating use, lower to
// int32 arithmetic (add/sub are sign-agnostic).
TEST(LowerNumberAddSub_to_uint32) {
  TestingGraph t(Type::Unsigned32(), Type::Unsigned32());
  t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Add,
                                t.simplified()->NumberAdd(),
                                t.simplified()->NumberToUint32());
  t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Sub,
                                t.simplified()->NumberSubtract(),
                                t.simplified()->NumberToUint32());
}
943
944
// Without a truncating use, NumberAdd/Subtract lower to float64 arithmetic
// for every tested input type.
TEST(LowerNumberAddSub_to_float64) {
  for (size_t i = 0; i < arraysize(test_types); i++) {
    TestingGraph t(test_types[i], test_types[i]);

    t.CheckLoweringBinop(IrOpcode::kFloat64Add, t.simplified()->NumberAdd());
    t.CheckLoweringBinop(IrOpcode::kFloat64Sub,
                         t.simplified()->NumberSubtract());
  }
}
954
955
// NumberDivide/Modulus always lower to float64 operations for the tested
// input types.
TEST(LowerNumberDivMod_to_float64) {
  for (size_t i = 0; i < arraysize(test_types); i++) {
    TestingGraph t(test_types[i], test_types[i]);

    t.CheckLoweringBinop(IrOpcode::kFloat64Div, t.simplified()->NumberDivide());
    t.CheckLoweringBinop(IrOpcode::kFloat64Mod,
                         t.simplified()->NumberModulus());
  }
}
965
966
// Asserts that {node} is a representation-change node with opcode {change}
// whose (single value) input is {of}.
static void CheckChangeOf(IrOpcode::Value change, Node* of, Node* node) {
  CHECK_EQ(change, node->opcode());
  CHECK_EQ(of, node->InputAt(0));
}
971
972
973TEST(LowerNumberToInt32_to_nop) {
974 // NumberToInt32(x: kRepTagged | kTypeInt32) used as kRepTagged
975 TestingGraph t(Type::Signed32());
976 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
977 Node* use = t.Use(trunc, kRepTagged);
978 t.Return(use);
979 t.Lower();
980 CHECK_EQ(t.p0, use->InputAt(0));
981}
982
983
// When the truncation's use wants float64, the NumberToInt32 disappears and a
// single ChangeTaggedToFloat64 is inserted on the parameter.
TEST(LowerNumberToInt32_to_ChangeTaggedToFloat64) {
  // NumberToInt32(x: kRepTagged | kTypeInt32) used as kRepFloat64
  TestingGraph t(Type::Signed32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
  Node* use = t.Use(trunc, kRepFloat64);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p0, use->InputAt(0));
}
993
994
// A use requesting kTypeInt32 (word32 representation is then selected) turns
// the truncation into a plain ChangeTaggedToInt32 of the parameter.
TEST(LowerNumberToInt32_to_ChangeTaggedToInt32) {
  // NumberToInt32(x: kRepTagged | kTypeInt32) used as kRepWord32
  TestingGraph t(Type::Signed32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
  Node* use = t.Use(trunc, kTypeInt32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToInt32, t.p0, use->InputAt(0));
}
1004
1005
// A float64-producing input used as int32 lowers the truncation to
// TruncateFloat64ToInt32 directly (no tagged round trip).
TEST(LowerNumberToInt32_to_TruncateFloat64ToInt32) {
  // NumberToInt32(x: kRepFloat64) used as kMachInt32
  TestingGraph t(Type::Number());
  Node* p0 = t.ExampleWithOutput(kMachFloat64);
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), p0);
  Node* use = t.Use(trunc, kMachInt32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kTruncateFloat64ToInt32, p0, use->InputAt(0));
}
1016
1017
// A tagged Number input used as int32 lowers to a two-step chain:
// TruncateFloat64ToInt32(ChangeTaggedToFloat64(p0)).
TEST(LowerNumberToInt32_to_TruncateFloat64ToInt32_with_change) {
  // NumberToInt32(x: kTypeNumber | kRepTagged) used as kMachInt32
  TestingGraph t(Type::Number());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
  Node* use = t.Use(trunc, kMachInt32);
  t.Return(use);
  t.Lower();
  // Walk the inserted conversion chain from the use back to the parameter.
  Node* node = use->InputAt(0);
  CHECK_EQ(IrOpcode::kTruncateFloat64ToInt32, node->opcode());
  Node* of = node->InputAt(0);
  CHECK_EQ(IrOpcode::kChangeTaggedToFloat64, of->opcode());
  CHECK_EQ(t.p0, of->InputAt(0));
}
1031
1032
// Placeholder: this lowering case is not implemented/exercised yet.
TEST(LowerNumberToInt32_to_ChangeFloat64ToTagged) {
  // TODO(titzer): NumberToInt32(x: kRepFloat64 | kTypeInt32) used as kRepTagged
}
1036
1037
// Placeholder: this lowering case is not implemented/exercised yet.
TEST(LowerNumberToInt32_to_ChangeFloat64ToInt32) {
  // TODO(titzer): NumberToInt32(x: kRepFloat64 | kTypeInt32) used as kRepWord32
  // | kTypeInt32
}
1042
1043
1044TEST(LowerNumberToUint32_to_nop) {
1045 // NumberToUint32(x: kRepTagged | kTypeUint32) used as kRepTagged
1046 TestingGraph t(Type::Unsigned32());
1047 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
1048 Node* use = t.Use(trunc, kRepTagged);
1049 t.Return(use);
1050 t.Lower();
1051 CHECK_EQ(t.p0, use->InputAt(0));
1052}
1053
1054
// When the truncation's use wants float64, the NumberToUint32 disappears and a
// single ChangeTaggedToFloat64 is inserted on the parameter.
TEST(LowerNumberToUint32_to_ChangeTaggedToFloat64) {
  // NumberToUint32(x: kRepTagged | kTypeUint32) used as kRepFloat64
  TestingGraph t(Type::Unsigned32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
  Node* use = t.Use(trunc, kRepFloat64);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p0, use->InputAt(0));
}
1064
1065
// A use requesting kTypeUint32 (word32 representation is then selected) turns
// the truncation into a plain ChangeTaggedToUint32 of the parameter.
TEST(LowerNumberToUint32_to_ChangeTaggedToUint32) {
  // NumberToUint32(x: kRepTagged | kTypeUint32) used as kRepWord32
  TestingGraph t(Type::Unsigned32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
  Node* use = t.Use(trunc, kTypeUint32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToUint32, t.p0, use->InputAt(0));
}
1075
1076
// A float64-producing input used as uint32 lowers the truncation to
// TruncateFloat64ToInt32 (the int32 truncation is bit-identical for uint32).
TEST(LowerNumberToUint32_to_TruncateFloat64ToInt32) {
  // NumberToUint32(x: kRepFloat64) used as kMachUint32
  TestingGraph t(Type::Number());
  Node* p0 = t.ExampleWithOutput(kMachFloat64);
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), p0);
  Node* use = t.Use(trunc, kMachUint32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kTruncateFloat64ToInt32, p0, use->InputAt(0));
}
1087
1088
// A tagged Number input used as uint32 lowers to a two-step chain:
// TruncateFloat64ToInt32(ChangeTaggedToFloat64(p0)).
TEST(LowerNumberToUint32_to_TruncateFloat64ToInt32_with_change) {
  // NumberToUint32(x: kTypeNumber | kRepTagged) used as kMachUint32
  TestingGraph t(Type::Number());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
  Node* use = t.Use(trunc, kMachUint32);
  t.Return(use);
  t.Lower();
  // Walk the inserted conversion chain from the use back to the parameter.
  Node* node = use->InputAt(0);
  CHECK_EQ(IrOpcode::kTruncateFloat64ToInt32, node->opcode());
  Node* of = node->InputAt(0);
  CHECK_EQ(IrOpcode::kChangeTaggedToFloat64, of->opcode());
  CHECK_EQ(t.p0, of->InputAt(0));
}
1102
1103
// Placeholder: this lowering case is not implemented/exercised yet.
TEST(LowerNumberToUint32_to_ChangeFloat64ToTagged) {
  // TODO(titzer): NumberToUint32(x: kRepFloat64 | kTypeUint32) used as
  // kRepTagged
}
1108
1109
// Placeholder: this lowering case is not implemented/exercised yet.
TEST(LowerNumberToUint32_to_ChangeFloat64ToUint32) {
  // TODO(titzer): NumberToUint32(x: kRepFloat64 | kTypeUint32) used as
  // kRepWord32
}
1114
1115
// Placeholder: this lowering case is not implemented/exercised yet.
TEST(LowerNumberToUint32_to_TruncateFloat64ToUint32) {
  // TODO(titzer): NumberToUint32(x: kRepFloat64) used as kRepWord32
}
1119
1120
// ReferenceEqual compares object identity, so it lowers to the machine's
// pointer-width WordEqual.
TEST(LowerReferenceEqual_to_wordeq) {
  TestingGraph t(Type::Any(), Type::Any());
  IrOpcode::Value opcode =
      static_cast<IrOpcode::Value>(t.machine()->WordEqual()->opcode());
  t.CheckLoweringBinop(opcode, t.simplified()->ReferenceEqual(Type::Any()));
}
1127
1128
// String comparisons lower to a runtime call whose result is compared against
// zero with the machine's word compare ops; StringAdd lowers to a plain call.
TEST(LowerStringOps_to_call_and_compare) {
  if (Pipeline::SupportedTarget()) {
    // These tests need linkage for the calls.
    TestingGraph t(Type::String(), Type::String());
    IrOpcode::Value compare_eq =
        static_cast<IrOpcode::Value>(t.machine()->WordEqual()->opcode());
    IrOpcode::Value compare_lt =
        static_cast<IrOpcode::Value>(t.machine()->IntLessThan()->opcode());
    IrOpcode::Value compare_le = static_cast<IrOpcode::Value>(
        t.machine()->IntLessThanOrEqual()->opcode());
    t.CheckLoweringBinop(compare_eq, t.simplified()->StringEqual());
    t.CheckLoweringBinop(compare_lt, t.simplified()->StringLessThan());
    t.CheckLoweringBinop(compare_le, t.simplified()->StringLessThanOrEqual());
    t.CheckLoweringBinop(IrOpcode::kCall, t.simplified()->StringAdd());
  }
}
1145
1146
1147void CheckChangeInsertion(IrOpcode::Value expected, MachineType from,
1148 MachineType to) {
1149 TestingGraph t(Type::Any());
1150 Node* in = t.ExampleWithOutput(from);
1151 Node* use = t.Use(in, to);
1152 t.Return(use);
1153 t.Lower();
1154 CHECK_EQ(expected, use->InputAt(0)->opcode());
1155 CHECK_EQ(in, use->InputAt(0)->InputAt(0));
1156}
1157
1158
// Spot-checks every elementary representation change the lowering can insert
// between a producer and a consumer with mismatched machine types.
TEST(InsertBasicChanges) {
  // float64 -> word32 (signed and unsigned).
  CheckChangeInsertion(IrOpcode::kChangeFloat64ToInt32, kRepFloat64,
                       kTypeInt32);
  CheckChangeInsertion(IrOpcode::kChangeFloat64ToUint32, kRepFloat64,
                       kTypeUint32);
  CheckChangeInsertion(IrOpcode::kChangeTaggedToInt32, kRepTagged, kTypeInt32);
  CheckChangeInsertion(IrOpcode::kChangeTaggedToUint32, kRepTagged,
                       kTypeUint32);

  // tagged <-> float64.
  CheckChangeInsertion(IrOpcode::kChangeFloat64ToTagged, kRepFloat64,
                       kRepTagged);
  CheckChangeInsertion(IrOpcode::kChangeTaggedToFloat64, kRepTagged,
                       kRepFloat64);

  // int32 -> float64 / tagged.
  CheckChangeInsertion(IrOpcode::kChangeInt32ToFloat64, kTypeInt32,
                       kRepFloat64);
  CheckChangeInsertion(IrOpcode::kChangeInt32ToTagged, kTypeInt32, kRepTagged);

  // uint32 -> float64 / tagged.
  CheckChangeInsertion(IrOpcode::kChangeUint32ToFloat64, kTypeUint32,
                       kRepFloat64);
  CheckChangeInsertion(IrOpcode::kChangeUint32ToTagged, kTypeUint32,
                       kRepTagged);
}
1182
1183
1184static void CheckChangesAroundBinop(TestingGraph* t, const Operator* op,
1185 IrOpcode::Value input_change,
1186 IrOpcode::Value output_change) {
1187 Node* binop = t->graph()->NewNode(op, t->p0, t->p1);
1188 t->Return(binop);
1189 t->Lower();
1190 CHECK_EQ(input_change, binop->InputAt(0)->opcode());
1191 CHECK_EQ(input_change, binop->InputAt(1)->opcode());
1192 CHECK_EQ(t->p0, binop->InputAt(0)->InputAt(0));
1193 CHECK_EQ(t->p1, binop->InputAt(1)->InputAt(0));
1194 CHECK_EQ(output_change, t->ret->InputAt(0)->opcode());
1195 CHECK_EQ(binop, t->ret->InputAt(0)->InputAt(0));
1196}
1197
1198
// Tagged Signed32 inputs to int32 arithmetic/bitwise machine ops get
// ChangeTaggedToInt32 on the inputs and ChangeInt32ToTagged on the output.
TEST(InsertChangesAroundInt32Binops) {
  TestingGraph t(Type::Signed32(), Type::Signed32());

  const Operator* ops[] = {t.machine()->Int32Add(), t.machine()->Int32Sub(),
                           t.machine()->Int32Mul(), t.machine()->Int32Div(),
                           t.machine()->Int32Mod(), t.machine()->Word32And(),
                           t.machine()->Word32Or(), t.machine()->Word32Xor(),
                           t.machine()->Word32Shl(), t.machine()->Word32Sar()};

  for (size_t i = 0; i < arraysize(ops); i++) {
    CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToInt32,
                            IrOpcode::kChangeInt32ToTagged);
  }
}
1213
1214
1215TEST(InsertChangesAroundInt32Cmp) {
1216 TestingGraph t(Type::Signed32(), Type::Signed32());
1217
1218 const Operator* ops[] = {t.machine()->Int32LessThan(),
1219 t.machine()->Int32LessThanOrEqual()};
1220
1221 for (size_t i = 0; i < arraysize(ops); i++) {
1222 CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToInt32,
1223 IrOpcode::kChangeBitToBool);
1224 }
1225}
1226
1227
1228TEST(InsertChangesAroundUint32Cmp) {
1229 TestingGraph t(Type::Unsigned32(), Type::Unsigned32());
1230
1231 const Operator* ops[] = {t.machine()->Uint32LessThan(),
1232 t.machine()->Uint32LessThanOrEqual()};
1233
1234 for (size_t i = 0; i < arraysize(ops); i++) {
1235 CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToUint32,
1236 IrOpcode::kChangeBitToBool);
1237 }
1238}
1239
1240
// Tagged Number inputs to float64 arithmetic ops get ChangeTaggedToFloat64 on
// the inputs and ChangeFloat64ToTagged on the output.
TEST(InsertChangesAroundFloat64Binops) {
  TestingGraph t(Type::Number(), Type::Number());

  const Operator* ops[] = {
      t.machine()->Float64Add(), t.machine()->Float64Sub(),
      t.machine()->Float64Mul(), t.machine()->Float64Div(),
      t.machine()->Float64Mod(),
  };

  for (size_t i = 0; i < arraysize(ops); i++) {
    CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToFloat64,
                            IrOpcode::kChangeFloat64ToTagged);
  }
}
1255
1256
// Tagged Number inputs to float64 comparisons get ChangeTaggedToFloat64 on the
// inputs and ChangeBitToBool on the bit-valued result.
TEST(InsertChangesAroundFloat64Cmp) {
  TestingGraph t(Type::Number(), Type::Number());

  const Operator* ops[] = {t.machine()->Float64Equal(),
                           t.machine()->Float64LessThan(),
                           t.machine()->Float64LessThanOrEqual()};

  for (size_t i = 0; i < arraysize(ops); i++) {
    CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToFloat64,
                            IrOpcode::kChangeBitToBool);
  }
}
1269
1270
1271void CheckFieldAccessArithmetic(FieldAccess access, Node* load_or_store) {
1272 Int32Matcher index = Int32Matcher(load_or_store->InputAt(1));
1273 CHECK(index.Is(access.offset - access.tag()));
1274}
1275
1276
// Verifies the index arithmetic of a lowered element load/store:
//   Int32Add(Int32Mul(index, element_size), header_size - tag)
// (the Mul is omitted when element_size == 1). Returns the node that feeds the
// scaling, i.e. the (possibly converted) element index.
Node* CheckElementAccessArithmetic(ElementAccess access, Node* load_or_store) {
  Int32BinopMatcher index(load_or_store->InputAt(1));
  CHECK_EQ(IrOpcode::kInt32Add, index.node()->opcode());
  CHECK(index.right().Is(access.header_size - access.tag()));

  int element_size = ElementSizeOf(access.machine_type);

  // Only multi-byte elements need a scaling multiply.
  if (element_size != 1) {
    Int32BinopMatcher mul(index.left().node());
    CHECK_EQ(IrOpcode::kInt32Mul, mul.node()->opcode());
    CHECK(mul.right().Is(element_size));
    return mul.left().node();
  } else {
    return index.left().node();
  }
}
1293
1294
// Machine representations exercised by the LoadField/StoreField/
// LoadElement/StoreElement lowering tests below.
static const MachineType machine_reps[] = {
    kRepBit, kMachInt8, kMachInt16, kMachInt32,
    kMachInt64, kMachFloat64, kMachAnyTagged};
1298
1299
// LoadField lowers to a machine Load with the untagged field offset and the
// access's machine type, for every representation in machine_reps.
TEST(LowerLoadField_to_load) {
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(machine_reps); i++) {
    FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                          Handle<Name>::null(), Type::Any(), machine_reps[i]};

    Node* load =
        t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
    Node* use = t.Use(load, machine_reps[i]);
    t.Return(use);
    t.Lower();
    CHECK_EQ(IrOpcode::kLoad, load->opcode());
    CHECK_EQ(t.p0, load->InputAt(0));
    CheckFieldAccessArithmetic(access, load);

    // The Load must carry the machine type requested by the field access.
    MachineType rep = OpParameter<MachineType>(load);
    CHECK_EQ(machine_reps[i], rep);
  }
}
1320
1321
// StoreField lowers to a machine Store with the untagged field offset; tagged
// representations additionally require a full write barrier.
TEST(LowerStoreField_to_store) {
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(machine_reps); i++) {
    FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                          Handle<Name>::null(), Type::Any(), machine_reps[i]};


    Node* val = t.ExampleWithOutput(machine_reps[i]);
    Node* store = t.graph()->NewNode(t.simplified()->StoreField(access), t.p0,
                                     val, t.start, t.start);
    t.Effect(store);
    t.Lower();
    CHECK_EQ(IrOpcode::kStore, store->opcode());
    CHECK_EQ(val, store->InputAt(2));
    CheckFieldAccessArithmetic(access, store);

    // Storing a tagged value into a tagged-base object needs a write barrier.
    StoreRepresentation rep = OpParameter<StoreRepresentation>(store);
    if (machine_reps[i] & kRepTagged) {
      CHECK_EQ(kFullWriteBarrier, rep.write_barrier_kind());
    }
    CHECK_EQ(machine_reps[i], rep.machine_type());
  }
}
1346
1347
// LoadElement lowers to a machine Load whose index input encodes the scaled
// element offset (checked by CheckElementAccessArithmetic).
TEST(LowerLoadElement_to_load) {
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(machine_reps); i++) {
    ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                            Type::Any(), machine_reps[i]};

    Node* load =
        t.graph()->NewNode(t.simplified()->LoadElement(access), t.p0, t.p1,
                           t.jsgraph.Int32Constant(1024), t.start);
    Node* use = t.Use(load, machine_reps[i]);
    t.Return(use);
    t.Lower();
    CHECK_EQ(IrOpcode::kLoad, load->opcode());
    CHECK_EQ(t.p0, load->InputAt(0));
    CheckElementAccessArithmetic(access, load);

    // The Load must carry the machine type requested by the element access.
    MachineType rep = OpParameter<MachineType>(load);
    CHECK_EQ(machine_reps[i], rep);
  }
}
1369
1370
// StoreElement lowers to a machine Store with scaled-index arithmetic; tagged
// representations additionally require a full write barrier.
TEST(LowerStoreElement_to_store) {
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(machine_reps); i++) {
    ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                            Type::Any(), machine_reps[i]};

    Node* val = t.ExampleWithOutput(machine_reps[i]);
    Node* store = t.graph()->NewNode(t.simplified()->StoreElement(access), t.p0,
                                     t.p1, t.jsgraph.Int32Constant(1024), val,
                                     t.start, t.start);
    t.Effect(store);
    t.Lower();
    CHECK_EQ(IrOpcode::kStore, store->opcode());
    CHECK_EQ(val, store->InputAt(2));
    CheckElementAccessArithmetic(access, store);

    // Storing a tagged value into a tagged-base object needs a write barrier.
    StoreRepresentation rep = OpParameter<StoreRepresentation>(store);
    if (machine_reps[i] & kRepTagged) {
      CHECK_EQ(kFullWriteBarrier, rep.write_barrier_kind());
    }
    CHECK_EQ(machine_reps[i], rep.machine_type());
  }
}
1395
1396
// A tagged element index must be converted to int32 before the element-offset
// arithmetic that the lowering inserts.
TEST(InsertChangeForLoadElementIndex) {
  // LoadElement(obj: Tagged, index: kTypeInt32 | kRepTagged, length) =>
  //   Load(obj, Int32Add(Int32Mul(ChangeTaggedToInt32(index), #k), #k))
  TestingGraph t(Type::Any(), Type::Signed32(), Type::Any());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachAnyTagged};

  Node* load = t.graph()->NewNode(t.simplified()->LoadElement(access), t.p0,
                                  t.p1, t.p2, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));

  Node* index = CheckElementAccessArithmetic(access, load);
  CheckChangeOf(IrOpcode::kChangeTaggedToInt32, t.p1, index);
}
1414
1415
// Same index conversion as the load case, but for StoreElement.
TEST(InsertChangeForStoreElementIndex) {
  // StoreElement(obj: Tagged, index: kTypeInt32 | kRepTagged, length, val) =>
  //   Store(obj, Int32Add(Int32Mul(ChangeTaggedToInt32(index), #k), #k), val)
  TestingGraph t(Type::Any(), Type::Signed32(), Type::Any());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachAnyTagged};

  Node* store =
      t.graph()->NewNode(t.simplified()->StoreElement(access), t.p0, t.p1, t.p2,
                         t.jsgraph.TrueConstant(), t.start, t.start);
  t.Effect(store);
  t.Lower();
  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));

  Node* index = CheckElementAccessArithmetic(access, store);
  CheckChangeOf(IrOpcode::kChangeTaggedToInt32, t.p1, index);
}
1434
1435
// A float64 element loaded into a tagged context needs a
// ChangeFloat64ToTagged on the load's result before the return.
TEST(InsertChangeForLoadElement) {
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32(), Type::Any());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachFloat64};

  Node* load = t.graph()->NewNode(t.simplified()->LoadElement(access), t.p0,
                                  t.p1, t.p1, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeFloat64ToTagged, load, t.ret->InputAt(0));
}
1450
1451
// A float64 field loaded into a tagged context needs a ChangeFloat64ToTagged
// on the load's result before the return.
TEST(InsertChangeForLoadField) {
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32());
  FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                        Handle<Name>::null(), Type::Any(), kMachFloat64};

  Node* load =
      t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeFloat64ToTagged, load, t.ret->InputAt(0));
}
1466
1467
// Storing a tagged value into a float64 element requires a
// ChangeTaggedToFloat64 on the stored value.
TEST(InsertChangeForStoreElement) {
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32(), Type::Any());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachFloat64};

  Node* store = t.graph()->NewNode(t.simplified()->StoreElement(access), t.p0,
                                   t.jsgraph.Int32Constant(0), t.p2, t.p1,
                                   t.start, t.start);
  t.Effect(store);
  t.Lower();

  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p1, store->InputAt(2));
}
1484
1485
// Storing a tagged value into a float64 field requires a
// ChangeTaggedToFloat64 on the stored value.
TEST(InsertChangeForStoreField) {
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32());
  FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                        Handle<Name>::null(), Type::Any(), kMachFloat64};

  Node* store = t.graph()->NewNode(t.simplified()->StoreField(access), t.p0,
                                   t.p1, t.start, t.start);
  t.Effect(store);
  t.Lower();

  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p1, store->InputAt(2));
}
1501
1502
// A phi over two loads must have its representation updated to match the
// representation demanded by its use after lowering.
TEST(UpdatePhi) {
  TestingGraph t(Type::Any(), Type::Signed32());
  static const MachineType kMachineTypes[] = {kMachInt32, kMachUint32,
                                              kMachFloat64};

  for (size_t i = 0; i < arraysize(kMachineTypes); i++) {
    FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                          Handle<Name>::null(), Type::Any(), kMachineTypes[i]};

    Node* load0 =
        t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
    Node* load1 =
        t.graph()->NewNode(t.simplified()->LoadField(access), t.p1, t.start);
    Node* phi = t.graph()->NewNode(t.common()->Phi(kMachAnyTagged, 2), load0,
                                   load1, t.start);
    t.Return(t.Use(phi, kMachineTypes[i]));
    t.Lower();

    // The phi survives lowering, but its machine type now reflects the use.
    CHECK_EQ(IrOpcode::kPhi, phi->opcode());
    CHECK_EQ(RepresentationOf(kMachineTypes[i]),
             RepresentationOf(OpParameter<MachineType>(phi)));
  }
}
1526
1527
// TODO(titzer): this tests current behavior of assuming an implicit
// representation change in loading float32s. Fix when float32 is fully
// supported.
TEST(ImplicitFloat32ToFloat64InLoads) {
  TestingGraph t(Type::Any());

  FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                        Handle<Name>::null(), Type::Any(), kMachFloat32};

  Node* load =
      t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));
  // The float32 load is treated as producing float64, so only a
  // ChangeFloat64ToTagged is inserted before the return.
  CheckChangeOf(IrOpcode::kChangeFloat64ToTagged, load, t.ret->InputAt(0));
}
1545
1546
// Mirror of the previous test for stores: the tagged value is converted to
// float64 only; narrowing to float32 is left implicit in the Store.
TEST(ImplicitFloat64ToFloat32InStores) {
  TestingGraph t(Type::Any(), Type::Signed32());
  FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                        Handle<Name>::null(), Type::Any(), kMachFloat32};

  Node* store = t.graph()->NewNode(t.simplified()->StoreField(access), t.p0,
                                   t.p1, t.start, t.start);
  t.Effect(store);
  t.Lower();

  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p1, store->InputAt(2));
}