blob: 147aa323ffaf887351b87b46c5015a98d261d343 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include <limits>
6
7#include "src/compiler/access-builder.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04008#include "src/compiler/change-lowering.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/compiler/control-builders.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040010#include "src/compiler/graph-reducer.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000011#include "src/compiler/graph-visualizer.h"
12#include "src/compiler/node-properties-inl.h"
13#include "src/compiler/pipeline.h"
14#include "src/compiler/representation-change.h"
15#include "src/compiler/simplified-lowering.h"
16#include "src/compiler/typer.h"
17#include "src/compiler/verifier.h"
18#include "src/execution.h"
19#include "src/parser.h"
20#include "src/rewriter.h"
21#include "src/scopes.h"
22#include "test/cctest/cctest.h"
23#include "test/cctest/compiler/codegen-tester.h"
24#include "test/cctest/compiler/graph-builder-tester.h"
25#include "test/cctest/compiler/value-helper.h"
26
27using namespace v8::internal;
28using namespace v8::internal::compiler;
29
30template <typename ReturnType>
31class SimplifiedLoweringTester : public GraphBuilderTester<ReturnType> {
32 public:
33 SimplifiedLoweringTester(MachineType p0 = kMachNone,
34 MachineType p1 = kMachNone,
35 MachineType p2 = kMachNone,
36 MachineType p3 = kMachNone,
37 MachineType p4 = kMachNone)
38 : GraphBuilderTester<ReturnType>(p0, p1, p2, p3, p4),
Emily Bernierd0a1eb72015-03-24 16:35:39 -040039 typer(this->graph(), MaybeHandle<Context>()),
Ben Murdochb8a8cc12014-11-26 15:28:44 +000040 javascript(this->zone()),
Emily Bernierd0a1eb72015-03-24 16:35:39 -040041 jsgraph(this->graph(), this->common(), &javascript, this->machine()),
42 lowering(&jsgraph, this->zone()) {}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000043
44 Typer typer;
45 JSOperatorBuilder javascript;
46 JSGraph jsgraph;
47 SimplifiedLowering lowering;
48
49 void LowerAllNodes() {
50 this->End();
Emily Bernierd0a1eb72015-03-24 16:35:39 -040051 typer.Run();
Ben Murdochb8a8cc12014-11-26 15:28:44 +000052 lowering.LowerAllNodes();
53 }
54
Emily Bernierd0a1eb72015-03-24 16:35:39 -040055 void LowerAllNodesAndLowerChanges() {
56 this->End();
57 typer.Run();
58 lowering.LowerAllNodes();
59
60 Zone* zone = this->zone();
61 CompilationInfo info(zone->isolate(), zone);
62 Linkage linkage(
63 zone, Linkage::GetSimplifiedCDescriptor(zone, this->machine_sig_));
64 ChangeLowering lowering(&jsgraph, &linkage);
65 GraphReducer reducer(this->graph(), this->zone());
66 reducer.AddReducer(&lowering);
67 reducer.ReduceGraph();
68 Verifier::Run(this->graph());
69 }
70
71 void CheckNumberCall(double expected, double input) {
72 // TODO(titzer): make calls to NewNumber work in cctests.
73 if (expected <= Smi::kMinValue) return;
74 if (expected >= Smi::kMaxValue) return;
75 Handle<Object> num = factory()->NewNumber(input);
76 Object* result = this->Call(*num);
77 CHECK(factory()->NewNumber(expected)->SameValue(result));
78 }
79
Ben Murdochb8a8cc12014-11-26 15:28:44 +000080 Factory* factory() { return this->isolate()->factory(); }
81 Heap* heap() { return this->isolate()->heap(); }
82};
83
84
Ben Murdochb8a8cc12014-11-26 15:28:44 +000085// TODO(titzer): factor these tests out to test-run-simplifiedops.cc.
86// TODO(titzer): test tagged representation for input to NumberToInt32.
87TEST(RunNumberToInt32_float64) {
88 // TODO(titzer): explicit load/stores here are only because of representations
89 double input;
90 int32_t result;
91 SimplifiedLoweringTester<Object*> t;
92 FieldAccess load = {kUntaggedBase, 0, Handle<Name>(), Type::Number(),
93 kMachFloat64};
94 Node* loaded = t.LoadField(load, t.PointerConstant(&input));
Emily Bernierd0a1eb72015-03-24 16:35:39 -040095 NodeProperties::SetBounds(loaded, Bounds(Type::Number()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +000096 Node* convert = t.NumberToInt32(loaded);
97 FieldAccess store = {kUntaggedBase, 0, Handle<Name>(), Type::Signed32(),
98 kMachInt32};
99 t.StoreField(store, t.PointerConstant(&result), convert);
100 t.Return(t.jsgraph.TrueConstant());
101 t.LowerAllNodes();
102 t.GenerateCode();
103
104 if (Pipeline::SupportedTarget()) {
105 FOR_FLOAT64_INPUTS(i) {
106 input = *i;
107 int32_t expected = DoubleToInt32(*i);
108 t.Call();
109 CHECK_EQ(expected, result);
110 }
111 }
112}
113
114
115// TODO(titzer): test tagged representation for input to NumberToUint32.
116TEST(RunNumberToUint32_float64) {
117 // TODO(titzer): explicit load/stores here are only because of representations
118 double input;
119 uint32_t result;
120 SimplifiedLoweringTester<Object*> t;
121 FieldAccess load = {kUntaggedBase, 0, Handle<Name>(), Type::Number(),
122 kMachFloat64};
123 Node* loaded = t.LoadField(load, t.PointerConstant(&input));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400124 NodeProperties::SetBounds(loaded, Bounds(Type::Number()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000125 Node* convert = t.NumberToUint32(loaded);
126 FieldAccess store = {kUntaggedBase, 0, Handle<Name>(), Type::Unsigned32(),
127 kMachUint32};
128 t.StoreField(store, t.PointerConstant(&result), convert);
129 t.Return(t.jsgraph.TrueConstant());
130 t.LowerAllNodes();
131 t.GenerateCode();
132
133 if (Pipeline::SupportedTarget()) {
134 FOR_FLOAT64_INPUTS(i) {
135 input = *i;
136 uint32_t expected = DoubleToUint32(*i);
137 t.Call();
138 CHECK_EQ(static_cast<int32_t>(expected), static_cast<int32_t>(result));
139 }
140 }
141}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000142
143
144// Create a simple JSObject with a unique map.
145static Handle<JSObject> TestObject() {
146 static int index = 0;
147 char buffer[50];
148 v8::base::OS::SNPrintF(buffer, 50, "({'a_%d':1})", index++);
149 return Handle<JSObject>::cast(v8::Utils::OpenHandle(*CompileRun(buffer)));
150}
151
152
153TEST(RunLoadMap) {
154 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
155 FieldAccess access = AccessBuilder::ForMap();
156 Node* load = t.LoadField(access, t.Parameter(0));
157 t.Return(load);
158
159 t.LowerAllNodes();
160 t.GenerateCode();
161
162 if (Pipeline::SupportedTarget()) {
163 Handle<JSObject> src = TestObject();
164 Handle<Map> src_map(src->map());
165 Object* result = t.Call(*src); // TODO(titzer): raw pointers in call
166 CHECK_EQ(*src_map, result);
167 }
168}
169
170
171TEST(RunStoreMap) {
172 SimplifiedLoweringTester<int32_t> t(kMachAnyTagged, kMachAnyTagged);
173 FieldAccess access = AccessBuilder::ForMap();
174 t.StoreField(access, t.Parameter(1), t.Parameter(0));
175 t.Return(t.jsgraph.TrueConstant());
176
177 t.LowerAllNodes();
178 t.GenerateCode();
179
180 if (Pipeline::SupportedTarget()) {
181 Handle<JSObject> src = TestObject();
182 Handle<Map> src_map(src->map());
183 Handle<JSObject> dst = TestObject();
184 CHECK(src->map() != dst->map());
185 t.Call(*src_map, *dst); // TODO(titzer): raw pointers in call
186 CHECK(*src_map == dst->map());
187 }
188}
189
190
191TEST(RunLoadProperties) {
192 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
193 FieldAccess access = AccessBuilder::ForJSObjectProperties();
194 Node* load = t.LoadField(access, t.Parameter(0));
195 t.Return(load);
196
197 t.LowerAllNodes();
198 t.GenerateCode();
199
200 if (Pipeline::SupportedTarget()) {
201 Handle<JSObject> src = TestObject();
202 Handle<FixedArray> src_props(src->properties());
203 Object* result = t.Call(*src); // TODO(titzer): raw pointers in call
204 CHECK_EQ(*src_props, result);
205 }
206}
207
208
209TEST(RunLoadStoreMap) {
210 SimplifiedLoweringTester<Object*> t(kMachAnyTagged, kMachAnyTagged);
211 FieldAccess access = AccessBuilder::ForMap();
212 Node* load = t.LoadField(access, t.Parameter(0));
213 t.StoreField(access, t.Parameter(1), load);
214 t.Return(load);
215
216 t.LowerAllNodes();
217 t.GenerateCode();
218
219 if (Pipeline::SupportedTarget()) {
220 Handle<JSObject> src = TestObject();
221 Handle<Map> src_map(src->map());
222 Handle<JSObject> dst = TestObject();
223 CHECK(src->map() != dst->map());
224 Object* result = t.Call(*src, *dst); // TODO(titzer): raw pointers in call
225 CHECK(result->IsMap());
226 CHECK_EQ(*src_map, result);
227 CHECK(*src_map == dst->map());
228 }
229}
230
231
232TEST(RunLoadStoreFixedArrayIndex) {
233 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
234 ElementAccess access = AccessBuilder::ForFixedArrayElement();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400235 Node* load = t.LoadElement(access, t.Parameter(0), t.Int32Constant(0));
236 t.StoreElement(access, t.Parameter(0), t.Int32Constant(1), load);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000237 t.Return(load);
238
239 t.LowerAllNodes();
240 t.GenerateCode();
241
242 if (Pipeline::SupportedTarget()) {
243 Handle<FixedArray> array = t.factory()->NewFixedArray(2);
244 Handle<JSObject> src = TestObject();
245 Handle<JSObject> dst = TestObject();
246 array->set(0, *src);
247 array->set(1, *dst);
248 Object* result = t.Call(*array);
249 CHECK_EQ(*src, result);
250 CHECK_EQ(*src, array->get(0));
251 CHECK_EQ(*src, array->get(1));
252 }
253}
254
255
// Copies byte {index} of a JSArrayBuffer's backing store to byte {index + 1}
// through typed-array element accesses, then verifies the buffer contents.
TEST(RunLoadStoreArrayBuffer) {
  SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
  const int index = 12;
  const int array_length = 2 * index;
  ElementAccess buffer_access =
      AccessBuilder::ForTypedArrayElement(v8::kExternalInt8Array, true);
  // The backing store pointer is itself loaded from the buffer object.
  Node* backing_store = t.LoadField(
      AccessBuilder::ForJSArrayBufferBackingStore(), t.Parameter(0));
  Node* load =
      t.LoadElement(buffer_access, backing_store, t.Int32Constant(index));
  t.StoreElement(buffer_access, backing_store, t.Int32Constant(index + 1),
                 load);
  t.Return(t.jsgraph.TrueConstant());

  t.LowerAllNodes();
  t.GenerateCode();

  if (Pipeline::SupportedTarget()) {
    Handle<JSArrayBuffer> array = t.factory()->NewJSArrayBuffer();
    Runtime::SetupArrayBufferAllocatingData(t.isolate(), array, array_length);
    uint8_t* data = reinterpret_cast<uint8_t*>(array->backing_store());
    // Fill the buffer with data[i] == i so the copy is observable.
    for (int i = 0; i < array_length; i++) {
      data[i] = i;
    }

    // TODO(titzer): raw pointers in call
    Object* result = t.Call(*array);
    CHECK_EQ(t.isolate()->heap()->true_value(), result);
    // Only slot index+1 changed; it now holds the value from slot index.
    for (int i = 0; i < array_length; i++) {
      uint8_t expected = i;
      if (i == (index + 1)) expected = index;
      CHECK_EQ(data[i], expected);
    }
  }
}
291
292
293TEST(RunLoadFieldFromUntaggedBase) {
294 Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3)};
295
296 for (size_t i = 0; i < arraysize(smis); i++) {
297 int offset = static_cast<int>(i * sizeof(Smi*));
298 FieldAccess access = {kUntaggedBase, offset, Handle<Name>(),
299 Type::Integral32(), kMachAnyTagged};
300
301 SimplifiedLoweringTester<Object*> t;
302 Node* load = t.LoadField(access, t.PointerConstant(smis));
303 t.Return(load);
304 t.LowerAllNodes();
305
306 if (!Pipeline::SupportedTarget()) continue;
307
308 for (int j = -5; j <= 5; j++) {
309 Smi* expected = Smi::FromInt(j);
310 smis[i] = expected;
311 CHECK_EQ(expected, t.Call());
312 }
313 }
314}
315
316
317TEST(RunStoreFieldToUntaggedBase) {
318 Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3)};
319
320 for (size_t i = 0; i < arraysize(smis); i++) {
321 int offset = static_cast<int>(i * sizeof(Smi*));
322 FieldAccess access = {kUntaggedBase, offset, Handle<Name>(),
323 Type::Integral32(), kMachAnyTagged};
324
325 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
326 Node* p0 = t.Parameter(0);
327 t.StoreField(access, t.PointerConstant(smis), p0);
328 t.Return(p0);
329 t.LowerAllNodes();
330
331 if (!Pipeline::SupportedTarget()) continue;
332
333 for (int j = -5; j <= 5; j++) {
334 Smi* expected = Smi::FromInt(j);
335 smis[i] = Smi::FromInt(-100);
336 CHECK_EQ(expected, t.Call(expected));
337 CHECK_EQ(expected, smis[i]);
338 }
339 }
340}
341
342
// Loads a tagged element from a raw (untagged) base pointer, sweeping both
// the header offset and the element index; the load at offset i and index j
// reads slot i + j of {smis}.
TEST(RunLoadElementFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3),
                 Smi::FromInt(4), Smi::FromInt(5)};

  for (size_t i = 0; i < arraysize(smis); i++) {    // for header sizes
    for (size_t j = 0; (i + j) < arraysize(smis); j++) {  // for element index
      int offset = static_cast<int>(i * sizeof(Smi*));
      ElementAccess access = {kUntaggedBase, offset, Type::Integral32(),
                              kMachAnyTagged};

      SimplifiedLoweringTester<Object*> t;
      Node* load = t.LoadElement(access, t.PointerConstant(smis),
                                 t.Int32Constant(static_cast<int>(j)));
      t.Return(load);
      t.LowerAllNodes();

      if (!Pipeline::SupportedTarget()) continue;

      // Mutate the addressed slot directly and check the generated code
      // observes the new value on each call.
      for (int k = -5; k <= 5; k++) {
        Smi* expected = Smi::FromInt(k);
        smis[i + j] = expected;
        CHECK_EQ(expected, t.Call());
      }
    }
  }
}
369
370
// Stores a tagged element through a raw (untagged) base pointer, sweeping
// both the header offset and the element index; the store at offset i and
// index j writes slot i + j of {smis}.
TEST(RunStoreElementFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3),
                 Smi::FromInt(4), Smi::FromInt(5)};

  for (size_t i = 0; i < arraysize(smis); i++) {    // for header sizes
    for (size_t j = 0; (i + j) < arraysize(smis); j++) {  // for element index
      int offset = static_cast<int>(i * sizeof(Smi*));
      ElementAccess access = {kUntaggedBase, offset, Type::Integral32(),
                              kMachAnyTagged};

      SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
      Node* p0 = t.Parameter(0);
      t.StoreElement(access, t.PointerConstant(smis),
                     t.Int32Constant(static_cast<int>(j)), p0);
      t.Return(p0);
      t.LowerAllNodes();

      if (!Pipeline::SupportedTarget()) continue;

      // Poison the slot, run the store, then check both the return value and
      // the memory written by the generated code.
      for (int k = -5; k <= 5; k++) {
        Smi* expected = Smi::FromInt(k);
        smis[i + j] = Smi::FromInt(-100);
        CHECK_EQ(expected, t.Call(expected));
        CHECK_EQ(expected, smis[i + j]);
      }

      // TODO(titzer): assert the contents of the array.
    }
  }
}
401
402
// A helper class for accessing fields and elements of various types, on both
// tagged and untagged base pointers. Contains both tagged and untagged buffers
// for testing direct memory access from generated code.
template <typename E>
class AccessTester : public HandleAndZoneScope {
 public:
  bool tagged;           // Whether accesses go through the tagged buffer.
  MachineType rep;       // Machine representation of the element type E.
  E* original_elements;  // Reference data used to (re)initialize both buffers.
  size_t num_elements;
  E* untagged_array;     // malloc'ed buffer; freed in the destructor.
  Handle<ByteArray> tagged_array;  // TODO(titzer): use FixedArray for tagged.

  AccessTester(bool t, MachineType r, E* orig, size_t num)
      : tagged(t),
        rep(r),
        original_elements(orig),
        num_elements(num),
        untagged_array(static_cast<E*>(malloc(ByteSize()))),
        tagged_array(main_isolate()->factory()->NewByteArray(
            static_cast<int>(ByteSize()))) {
    Reinitialize();
  }

  ~AccessTester() { free(untagged_array); }

  size_t ByteSize() { return num_elements * sizeof(E); }

  // Nuke both {untagged_array} and {tagged_array} with {original_elements}.
  void Reinitialize() {
    memcpy(untagged_array, original_elements, ByteSize());
    CHECK_EQ(static_cast<int>(ByteSize()), tagged_array->length());
    E* raw = reinterpret_cast<E*>(tagged_array->GetDataStartAddress());
    memcpy(raw, original_elements, ByteSize());
  }

  // Create and run code that copies the element in either {untagged_array}
  // or {tagged_array} at index {from_index} to index {to_index}.
  void RunCopyElement(int from_index, int to_index) {
    // TODO(titzer): test element and field accesses where the base is not
    // a constant in the code.
    BoundsCheck(from_index);
    BoundsCheck(to_index);
    ElementAccess access = GetElementAccess();

    SimplifiedLoweringTester<Object*> t;
    Node* ptr = GetBaseNode(&t);
    Node* load = t.LoadElement(access, ptr, t.Int32Constant(from_index));
    t.StoreElement(access, ptr, t.Int32Constant(to_index), load);
    t.Return(t.jsgraph.TrueConstant());
    t.LowerAllNodes();
    t.GenerateCode();

    if (Pipeline::SupportedTarget()) {
      Object* result = t.Call();
      CHECK_EQ(t.isolate()->heap()->true_value(), result);
    }
  }

  // Create and run code that copies the field in either {untagged_array}
  // or {tagged_array} at index {from_index} to index {to_index}.
  void RunCopyField(int from_index, int to_index) {
    BoundsCheck(from_index);
    BoundsCheck(to_index);
    FieldAccess from_access = GetFieldAccess(from_index);
    FieldAccess to_access = GetFieldAccess(to_index);

    SimplifiedLoweringTester<Object*> t;
    Node* ptr = GetBaseNode(&t);
    Node* load = t.LoadField(from_access, ptr);
    t.StoreField(to_access, ptr, load);
    t.Return(t.jsgraph.TrueConstant());
    t.LowerAllNodes();
    t.GenerateCode();

    if (Pipeline::SupportedTarget()) {
      Object* result = t.Call();
      CHECK_EQ(t.isolate()->heap()->true_value(), result);
    }
  }

  // Create and run code that copies the elements from {this} to {that}.
  // Currently a no-op: the loop body below is disabled (see TODO).
  void RunCopyElements(AccessTester<E>* that) {
// TODO(titzer): Rewrite this test without StructuredGraphBuilder support.
#if 0
    SimplifiedLoweringTester<Object*> t;

    Node* one = t.Int32Constant(1);
    Node* index = t.Int32Constant(0);
    Node* limit = t.Int32Constant(static_cast<int>(num_elements));
    t.environment()->Push(index);
    Node* src = this->GetBaseNode(&t);
    Node* dst = that->GetBaseNode(&t);
    {
      LoopBuilder loop(&t);
      loop.BeginLoop();
      // Loop exit condition
      index = t.environment()->Top();
      Node* condition = t.Int32LessThan(index, limit);
      loop.BreakUnless(condition);
      // dst[index] = src[index]
      index = t.environment()->Pop();
      Node* load = t.LoadElement(this->GetElementAccess(), src, index);
      t.StoreElement(that->GetElementAccess(), dst, index, load);
      // index++
      index = t.Int32Add(index, one);
      t.environment()->Push(index);
      // continue
      loop.EndBody();
      loop.EndLoop();
    }
    index = t.environment()->Pop();
    t.Return(t.jsgraph.TrueConstant());
    t.LowerAllNodes();
    t.GenerateCode();

    if (Pipeline::SupportedTarget()) {
      Object* result = t.Call();
      CHECK_EQ(t.isolate()->heap()->true_value(), result);
    }
#endif
  }

  // Reads element {index} back from whichever buffer is active.
  E GetElement(int index) {
    BoundsCheck(index);
    if (tagged) {
      E* raw = reinterpret_cast<E*>(tagged_array->GetDataStartAddress());
      return raw[index];
    } else {
      return untagged_array[index];
    }
  }

 private:
  // Element access descriptor for the active buffer; the tagged buffer uses
  // the FixedArrayBase header as its base offset.
  ElementAccess GetElementAccess() {
    ElementAccess access = {tagged ? kTaggedBase : kUntaggedBase,
                            tagged ? FixedArrayBase::kHeaderSize : 0,
                            Type::Any(), rep};
    return access;
  }

  // Field access descriptor addressing element {field} as a fixed offset.
  FieldAccess GetFieldAccess(int field) {
    int offset = field * sizeof(E);
    FieldAccess access = {tagged ? kTaggedBase : kUntaggedBase,
                          offset + (tagged ? FixedArrayBase::kHeaderSize : 0),
                          Handle<Name>(), Type::Any(), rep};
    return access;
  }

  // Base pointer node: a heap constant for the tagged buffer, a raw pointer
  // constant for the untagged one.
  template <typename T>
  Node* GetBaseNode(SimplifiedLoweringTester<T>* t) {
    return tagged ? t->HeapConstant(tagged_array)
                  : t->PointerConstant(untagged_array);
  }

  void BoundsCheck(int index) {
    CHECK_GE(index, 0);
    CHECK_LT(index, static_cast<int>(num_elements));
    CHECK_EQ(static_cast<int>(ByteSize()), tagged_array->length());
  }
};
564
565
// Drives AccessTester<E>: runs single field copies and single element copies
// on both tagged and untagged buffers, verifying memory contents after each,
// then runs whole-array copies across all tagged/untagged combinations.
template <typename E>
static void RunAccessTest(MachineType rep, E* original_elements, size_t num) {
  int num_elements = static_cast<int>(num);

  for (int taggedness = 0; taggedness < 2; taggedness++) {
    AccessTester<E> a(taggedness == 1, rep, original_elements, num);
    for (int field = 0; field < 2; field++) {
      for (int i = 0; i < num_elements - 1; i++) {
        a.Reinitialize();
        if (field == 0) {
          a.RunCopyField(i, i + 1);  // Test field read/write.
        } else {
          a.RunCopyElement(i, i + 1);  // Test element read/write.
        }
        if (Pipeline::SupportedTarget()) {  // verify.
          // Only slot i+1 changed; it now holds the original value of slot i.
          for (int j = 0; j < num_elements; j++) {
            E expect =
                j == (i + 1) ? original_elements[i] : original_elements[j];
            CHECK_EQ(expect, a.GetElement(j));
          }
        }
      }
    }
  }
  // Test array copy.
  for (int tf = 0; tf < 2; tf++) {
    for (int tt = 0; tt < 2; tt++) {
      AccessTester<E> a(tf == 1, rep, original_elements, num);
      AccessTester<E> b(tt == 1, rep, original_elements, num);
      a.RunCopyElements(&b);
      if (Pipeline::SupportedTarget()) {  // verify.
        for (int i = 0; i < num_elements; i++) {
          CHECK_EQ(a.GetElement(i), b.GetElement(i));
        }
      }
    }
  }
}
604
605
// Field/element copy tests over 8-bit values.
// NOTE(review): data is uint8_t but the rep is kMachInt8 -- presumably
// signedness is irrelevant for pure copies; confirm.
TEST(RunAccessTests_uint8) {
  uint8_t data[] = {0x07, 0x16, 0x25, 0x34, 0x43, 0x99,
                    0xab, 0x78, 0x89, 0x19, 0x2b, 0x38};
  RunAccessTest<uint8_t>(kMachInt8, data, arraysize(data));
}
611
612
613TEST(RunAccessTests_uint16) {
614 uint16_t data[] = {0x071a, 0x162b, 0x253c, 0x344d, 0x435e, 0x7777};
615 RunAccessTest<uint16_t>(kMachInt16, data, arraysize(data));
616}
617
618
619TEST(RunAccessTests_int32) {
620 int32_t data[] = {-211, 211, 628347, 2000000000, -2000000000, -1, -100000034};
621 RunAccessTest<int32_t>(kMachInt32, data, arraysize(data));
622}
623
624
// Builds a 64-bit constant from two 32-bit halves: (a << 32) + 0x{b}.
#define V8_2PART_INT64(a, b) (((static_cast<int64_t>(a) << 32) + 0x##b##u))


// Field/element copy tests over 64-bit values; only run on 64-bit targets.
TEST(RunAccessTests_int64) {
  if (kPointerSize != 8) return;
  int64_t data[] = {V8_2PART_INT64(0x10111213, 14151617),
                    V8_2PART_INT64(0x20212223, 24252627),
                    V8_2PART_INT64(0x30313233, 34353637),
                    V8_2PART_INT64(0xa0a1a2a3, a4a5a6a7),
                    V8_2PART_INT64(0xf0f1f2f3, f4f5f6f7)};
  RunAccessTest<int64_t>(kMachInt64, data, arraysize(data));
}
637
638
639TEST(RunAccessTests_float64) {
640 double data[] = {1.25, -1.25, 2.75, 11.0, 11100.8};
641 RunAccessTest<double>(kMachFloat64, data, arraysize(data));
642}
643
644
// Field/element copy tests over tagged Smi values, up to the Smi maximum.
TEST(RunAccessTests_Smi) {
  Smi* data[] = {Smi::FromInt(-1), Smi::FromInt(-9),
                 Smi::FromInt(0), Smi::FromInt(666),
                 Smi::FromInt(77777), Smi::FromInt(Smi::kMaxValue)};
  RunAccessTest<Smi*>(kMachAnyTagged, data, arraysize(data));
}
651
652
// Fills in most of the nodes of the graph in order to make tests shorter.
class TestingGraph : public HandleAndZoneScope, public GraphAndBuilders {
 public:
  Typer typer;
  JSOperatorBuilder javascript;
  JSGraph jsgraph;
  Node* p0;     // Parameter nodes, typed per the constructor arguments.
  Node* p1;
  Node* p2;
  Node* start;  // Graph start node.
  Node* end;    // Graph end node.
  Node* ret;    // Return node; tests splice values/effects/control into it.

  // Builds start/end, a Return of constant 0, and three typed parameters,
  // then runs the typer once.
  explicit TestingGraph(Type* p0_type, Type* p1_type = Type::None(),
                        Type* p2_type = Type::None())
      : GraphAndBuilders(main_zone()),
        typer(graph(), MaybeHandle<Context>()),
        javascript(main_zone()),
        jsgraph(graph(), common(), &javascript, machine()) {
    start = graph()->NewNode(common()->Start(2));
    graph()->SetStart(start);
    ret =
        graph()->NewNode(common()->Return(), jsgraph.Constant(0), start, start);
    end = graph()->NewNode(common()->End(), ret);
    graph()->SetEnd(end);
    p0 = graph()->NewNode(common()->Parameter(0), start);
    p1 = graph()->NewNode(common()->Parameter(1), start);
    p2 = graph()->NewNode(common()->Parameter(2), start);
    typer.Run();
    // Force the requested types onto the parameters after typing.
    NodeProperties::SetBounds(p0, Bounds(p0_type));
    NodeProperties::SetBounds(p1, Bounds(p1_type));
    NodeProperties::SetBounds(p2, Bounds(p2_type));
  }

  // Returns op(p0, p1), lowers the graph, and checks the node's new opcode.
  void CheckLoweringBinop(IrOpcode::Value expected, const Operator* op) {
    Node* node = Return(graph()->NewNode(op, p0, p1));
    Lower();
    CHECK_EQ(expected, node->opcode());
  }

  // Like CheckLoweringBinop, but the binop's only use is a truncation.
  void CheckLoweringTruncatedBinop(IrOpcode::Value expected, const Operator* op,
                                   const Operator* trunc) {
    Node* node = graph()->NewNode(op, p0, p1);
    Return(graph()->NewNode(trunc, node));
    Lower();
    CHECK_EQ(expected, node->opcode());
  }

  void Lower() { SimplifiedLowering(&jsgraph, jsgraph.zone()).LowerAllNodes(); }

  // Inserts the node as the return value of the graph.
  Node* Return(Node* node) {
    ret->ReplaceInput(0, node);
    return node;
  }

  // Inserts the node as the effect input to the return of the graph.
  void Effect(Node* node) { ret->ReplaceInput(1, node); }

  // Returns a node whose output representation matches {type}: a machine op
  // over constants for the int32/uint32/float64/bit/word64 cases, or p0 for
  // the tagged case.
  Node* ExampleWithOutput(MachineType type) {
    // TODO(titzer): use parameters with guaranteed representations.
    if (type & kTypeInt32) {
      return graph()->NewNode(machine()->Int32Add(), jsgraph.Int32Constant(1),
                              jsgraph.Int32Constant(1));
    } else if (type & kTypeUint32) {
      return graph()->NewNode(machine()->Word32Shr(), jsgraph.Int32Constant(1),
                              jsgraph.Int32Constant(1));
    } else if (type & kRepFloat64) {
      return graph()->NewNode(machine()->Float64Add(),
                              jsgraph.Float64Constant(1),
                              jsgraph.Float64Constant(1));
    } else if (type & kRepBit) {
      return graph()->NewNode(machine()->Word32Equal(),
                              jsgraph.Int32Constant(1),
                              jsgraph.Int32Constant(1));
    } else if (type & kRepWord64) {
      return graph()->NewNode(machine()->Int64Add(), Int64Constant(1),
                              Int64Constant(1));
    } else {
      CHECK(type & kRepTagged);
      return p0;
    }
  }

  // Returns a node with the given static type and machine representation,
  // realized as a LoadField from an untagged base.
  Node* ExampleWithTypeAndRep(Type* type, MachineType mach_type) {
    FieldAccess access = {kUntaggedBase, 0, Handle<Name>::null(), type,
                          mach_type};
    // TODO(titzer): using loads here just to force the representation is ugly.
    Node* node = graph()->NewNode(simplified()->LoadField(access),
                                  jsgraph.IntPtrConstant(0), graph()->start(),
                                  graph()->start());
    NodeProperties::SetBounds(node, Bounds(type));
    return node;
  }

  // Returns a node that consumes {node} with the given representation,
  // forcing the lowering to insert a representation change if needed.
  Node* Use(Node* node, MachineType type) {
    if (type & kTypeInt32) {
      return graph()->NewNode(machine()->Int32LessThan(), node,
                              jsgraph.Int32Constant(1));
    } else if (type & kTypeUint32) {
      return graph()->NewNode(machine()->Uint32LessThan(), node,
                              jsgraph.Int32Constant(1));
    } else if (type & kRepFloat64) {
      return graph()->NewNode(machine()->Float64Add(), node,
                              jsgraph.Float64Constant(1));
    } else if (type & kRepWord64) {
      return graph()->NewNode(machine()->Int64LessThan(), node,
                              Int64Constant(1));
    } else if (type & kRepWord32) {
      return graph()->NewNode(machine()->Word32Equal(), node,
                              jsgraph.Int32Constant(1));
    } else {
      return graph()->NewNode(simplified()->ReferenceEqual(Type::Any()), node,
                              jsgraph.TrueConstant());
    }
  }

  // Builds a branch on {cond} whose merge becomes the return's control input,
  // and returns the Branch node.
  Node* Branch(Node* cond) {
    Node* br = graph()->NewNode(common()->Branch(), cond, start);
    Node* tb = graph()->NewNode(common()->IfTrue(), br);
    Node* fb = graph()->NewNode(common()->IfFalse(), br);
    Node* m = graph()->NewNode(common()->Merge(2), tb, fb);
    NodeProperties::ReplaceControlInput(ret, m);
    return br;
  }

  Node* Int64Constant(int64_t v) {
    return graph()->NewNode(common()->Int64Constant(v));
  }

  SimplifiedOperatorBuilder* simplified() { return &main_simplified_; }
  MachineOperatorBuilder* machine() { return &main_machine_; }
  CommonOperatorBuilder* common() { return &main_common_; }
  Graph* graph() { return main_graph_; }
};
788
789
// AnyToBoolean on a kRepBit input used as kRepBit must lower to a no-op:
// the branch should consume the input directly, for every type contained
// in zero/one/boolean.
TEST(LowerAnyToBoolean_bit_bit) {
  // AnyToBoolean(x: kRepBit) used as kRepBit
  HandleAndZoneScope scope;
  Factory* f = scope.main_zone()->isolate()->factory();
  Handle<Object> zero = f->NewNumber(0);
  Handle<Object> one = f->NewNumber(1);
  Type* singleton_zero = Type::Constant(zero, scope.main_zone());
  Type* singleton_one = Type::Constant(one, scope.main_zone());
  Type* zero_one_range = Type::Range(zero, one, scope.main_zone());
  // NOTE(review): {kTypes} is a function-local static initialized from this
  // scope's zone; presumably the test only runs once per process -- confirm.
  static Type* kTypes[] = {
      singleton_zero, singleton_one, zero_one_range, Type::Boolean(),
      Type::Union(Type::Boolean(), singleton_zero, scope.main_zone()),
      Type::Union(Type::Boolean(), singleton_one, scope.main_zone()),
      Type::Union(Type::Boolean(), zero_one_range, scope.main_zone())};
  for (Type* type : kTypes) {
    TestingGraph t(type);
    Node* x = t.ExampleWithTypeAndRep(type, kRepBit);
    Node* cnv = t.graph()->NewNode(t.simplified()->AnyToBoolean(), x);
    Node* use = t.Branch(cnv);
    t.Lower();
    // The conversion vanished; the branch condition is the input itself.
    CHECK_EQ(x, use->InputAt(0));
  }
}
813
814
815#if V8_TURBOFAN_TARGET
816
// AnyToBoolean on a tagged value used as tagged must lower to a Call node
// with a HeapConstant code target (presumably the ToBoolean stub -- confirm),
// the original value, and the no-context constant as inputs.
TEST(LowerAnyToBoolean_tagged_tagged) {
  // AnyToBoolean(x: kRepTagged) used as kRepTagged
  TestingGraph t(Type::Any());
  Node* x = t.p0;
  Node* cnv = t.graph()->NewNode(t.simplified()->AnyToBoolean(), x);
  Node* use = t.Use(cnv, kRepTagged);
  t.Return(use);
  t.Lower();
  CHECK_EQ(IrOpcode::kCall, cnv->opcode());
  CHECK_EQ(IrOpcode::kHeapConstant, cnv->InputAt(0)->opcode());
  CHECK_EQ(x, cnv->InputAt(1));
  CHECK_EQ(t.jsgraph.NoContextConstant(), cnv->InputAt(2));
}
830
831#endif
832
833
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000834TEST(LowerBooleanNot_bit_bit) {
835 // BooleanNot(x: kRepBit) used as kRepBit
836 TestingGraph t(Type::Boolean());
837 Node* b = t.ExampleWithOutput(kRepBit);
838 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
839 Node* use = t.Branch(inv);
840 t.Lower();
841 Node* cmp = use->InputAt(0);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400842 CHECK_EQ(t.machine()->Word32Equal()->opcode(), cmp->opcode());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000843 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
844 Node* f = t.jsgraph.Int32Constant(0);
845 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
846}
847
848
849TEST(LowerBooleanNot_bit_tagged) {
850 // BooleanNot(x: kRepBit) used as kRepTagged
851 TestingGraph t(Type::Boolean());
852 Node* b = t.ExampleWithOutput(kRepBit);
853 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
854 Node* use = t.Use(inv, kRepTagged);
855 t.Return(use);
856 t.Lower();
857 CHECK_EQ(IrOpcode::kChangeBitToBool, use->InputAt(0)->opcode());
858 Node* cmp = use->InputAt(0)->InputAt(0);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400859 CHECK_EQ(t.machine()->Word32Equal()->opcode(), cmp->opcode());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000860 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
861 Node* f = t.jsgraph.Int32Constant(0);
862 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
863}
864
865
866TEST(LowerBooleanNot_tagged_bit) {
867 // BooleanNot(x: kRepTagged) used as kRepBit
868 TestingGraph t(Type::Boolean());
869 Node* b = t.p0;
870 Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
871 Node* use = t.Branch(inv);
872 t.Lower();
873 Node* cmp = use->InputAt(0);
874 CHECK_EQ(t.machine()->WordEqual()->opcode(), cmp->opcode());
875 CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
876 Node* f = t.jsgraph.FalseConstant();
877 CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
878}
879
880
// BooleanNot on a tagged input used as tagged must lower to
// ChangeBitToBool(WordEqual(input, false)).
TEST(LowerBooleanNot_tagged_tagged) {
  // BooleanNot(x: kRepTagged) used as kRepTagged
  TestingGraph t(Type::Boolean());
  Node* b = t.p0;
  Node* inv = t.graph()->NewNode(t.simplified()->BooleanNot(), b);
  Node* use = t.Use(inv, kRepTagged);
  t.Return(use);
  t.Lower();
  CHECK_EQ(IrOpcode::kChangeBitToBool, use->InputAt(0)->opcode());
  Node* cmp = use->InputAt(0)->InputAt(0);
  CHECK_EQ(t.machine()->WordEqual()->opcode(), cmp->opcode());
  // Operand order of the comparison is unspecified; accept either.
  CHECK(b == cmp->InputAt(0) || b == cmp->InputAt(1));
  Node* f = t.jsgraph.FalseConstant();
  CHECK(f == cmp->InputAt(0) || f == cmp->InputAt(1));
}
896
897
898TEST(LowerBooleanToNumber_bit_int32) {
899 // BooleanToNumber(x: kRepBit) used as kMachInt32
900 TestingGraph t(Type::Boolean());
901 Node* b = t.ExampleWithOutput(kRepBit);
902 Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
903 Node* use = t.Use(cnv, kMachInt32);
904 t.Return(use);
905 t.Lower();
906 CHECK_EQ(b, use->InputAt(0));
907}
908
909
TEST(LowerBooleanToNumber_tagged_int32) {
  // BooleanToNumber(x: kRepTagged) used as kMachInt32.
  // The conversion node itself is rewritten in place into
  // (WordEqual x, #true), which yields the 0/1 integer value.
  TestingGraph t(Type::Boolean());
  Node* b = t.p0;
  Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
  Node* use = t.Use(cnv, kMachInt32);
  t.Return(use);
  t.Lower();
  CHECK_EQ(t.machine()->WordEqual()->opcode(), cnv->opcode());
  // Operand order of the equality is unspecified; accept either side.
  CHECK(b == cnv->InputAt(0) || b == cnv->InputAt(1));
  Node* c = t.jsgraph.TrueConstant();
  CHECK(c == cnv->InputAt(0) || c == cnv->InputAt(1));
}
923
924
925TEST(LowerBooleanToNumber_bit_tagged) {
926 // BooleanToNumber(x: kRepBit) used as kMachAnyTagged
927 TestingGraph t(Type::Boolean());
928 Node* b = t.ExampleWithOutput(kRepBit);
929 Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
930 Node* use = t.Use(cnv, kMachAnyTagged);
931 t.Return(use);
932 t.Lower();
933 CHECK_EQ(b, use->InputAt(0)->InputAt(0));
934 CHECK_EQ(IrOpcode::kChangeInt32ToTagged, use->InputAt(0)->opcode());
935}
936
937
TEST(LowerBooleanToNumber_tagged_tagged) {
  // BooleanToNumber(x: kRepTagged) used as kMachAnyTagged.
  // The conversion becomes (WordEqual x, #true) producing an int32, and a
  // ChangeInt32ToTagged is inserted to box the result for the tagged use.
  TestingGraph t(Type::Boolean());
  Node* b = t.p0;
  Node* cnv = t.graph()->NewNode(t.simplified()->BooleanToNumber(), b);
  Node* use = t.Use(cnv, kMachAnyTagged);
  t.Return(use);
  t.Lower();
  CHECK_EQ(cnv, use->InputAt(0)->InputAt(0));
  CHECK_EQ(IrOpcode::kChangeInt32ToTagged, use->InputAt(0)->opcode());
  CHECK_EQ(t.machine()->WordEqual()->opcode(), cnv->opcode());
  // Operand order of the equality is unspecified; accept either side.
  CHECK(b == cnv->InputAt(0) || b == cnv->InputAt(1));
  Node* c = t.jsgraph.TrueConstant();
  CHECK(c == cnv->InputAt(0) || c == cnv->InputAt(1));
}
953
954
// Input types shared by the generic number-operator lowering tests below.
static Type* test_types[] = {Type::Signed32(), Type::Unsigned32(),
                             Type::Number(), Type::Any()};
957
958
TEST(LowerNumberCmp_to_int32) {
  // With both inputs typed Signed32, number comparisons lower to the
  // signed int32 machine comparisons.
  TestingGraph t(Type::Signed32(), Type::Signed32());

  t.CheckLoweringBinop(IrOpcode::kWord32Equal, t.simplified()->NumberEqual());
  t.CheckLoweringBinop(IrOpcode::kInt32LessThan,
                       t.simplified()->NumberLessThan());
  t.CheckLoweringBinop(IrOpcode::kInt32LessThanOrEqual,
                       t.simplified()->NumberLessThanOrEqual());
}
968
969
TEST(LowerNumberCmp_to_uint32) {
  // With both inputs typed Unsigned32, number comparisons lower to the
  // unsigned int32 machine comparisons (equality is sign-agnostic).
  TestingGraph t(Type::Unsigned32(), Type::Unsigned32());

  t.CheckLoweringBinop(IrOpcode::kWord32Equal, t.simplified()->NumberEqual());
  t.CheckLoweringBinop(IrOpcode::kUint32LessThan,
                       t.simplified()->NumberLessThan());
  t.CheckLoweringBinop(IrOpcode::kUint32LessThanOrEqual,
                       t.simplified()->NumberLessThanOrEqual());
}
979
980
TEST(LowerNumberCmp_to_float64) {
  // For inputs not known to fit int32 (Number, Any), number comparisons
  // fall back to the float64 machine comparisons.
  static Type* types[] = {Type::Number(), Type::Any()};

  for (size_t i = 0; i < arraysize(types); i++) {
    TestingGraph t(types[i], types[i]);

    t.CheckLoweringBinop(IrOpcode::kFloat64Equal,
                         t.simplified()->NumberEqual());
    t.CheckLoweringBinop(IrOpcode::kFloat64LessThan,
                         t.simplified()->NumberLessThan());
    t.CheckLoweringBinop(IrOpcode::kFloat64LessThanOrEqual,
                         t.simplified()->NumberLessThanOrEqual());
  }
}
995
996
TEST(LowerNumberAddSub_to_int32) {
  // NumberAdd/NumberSubtract whose result is truncated via NumberToInt32
  // should lower to Int32Add/Int32Sub for all combinations of int-ish input
  // types, including range types that do and do not fit in 32 bits.
  HandleAndZoneScope scope;
  Factory* f = scope.main_zone()->isolate()->factory();
  Type* small_range =
      Type::Range(f->NewNumber(1), f->NewNumber(10), scope.main_zone());
  // Deliberately wider than int32 to exercise the truncation logic.
  Type* large_range =
      Type::Range(f->NewNumber(-1e+13), f->NewNumber(1e+14), scope.main_zone());
  static Type* types[] = {Type::Signed32(), Type::Integral32(), small_range,
                          large_range};

  for (size_t i = 0; i < arraysize(types); i++) {
    for (size_t j = 0; j < arraysize(types); j++) {
      TestingGraph t(types[i], types[j]);
      t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Add,
                                    t.simplified()->NumberAdd(),
                                    t.simplified()->NumberToInt32());
      t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Sub,
                                    t.simplified()->NumberSubtract(),
                                    t.simplified()->NumberToInt32());
    }
  }
}
1019
1020
TEST(LowerNumberAddSub_to_uint32) {
  // Same as LowerNumberAddSub_to_int32, but truncating via NumberToUint32.
  // Add/sub are sign-agnostic in 32-bit arithmetic, so the expected machine
  // opcodes are still Int32Add/Int32Sub.
  HandleAndZoneScope scope;
  Factory* f = scope.main_zone()->isolate()->factory();
  Type* small_range =
      Type::Range(f->NewNumber(1), f->NewNumber(10), scope.main_zone());
  // Deliberately wider than int32 to exercise the truncation logic.
  Type* large_range =
      Type::Range(f->NewNumber(-1e+13), f->NewNumber(1e+14), scope.main_zone());
  static Type* types[] = {Type::Signed32(), Type::Integral32(), small_range,
                          large_range};

  for (size_t i = 0; i < arraysize(types); i++) {
    for (size_t j = 0; j < arraysize(types); j++) {
      TestingGraph t(types[i], types[j]);
      t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Add,
                                    t.simplified()->NumberAdd(),
                                    t.simplified()->NumberToUint32());
      t.CheckLoweringTruncatedBinop(IrOpcode::kInt32Sub,
                                    t.simplified()->NumberSubtract(),
                                    t.simplified()->NumberToUint32());
    }
  }
}
1043
1044
TEST(LowerNumberAddSub_to_float64) {
  // Without a truncating use, NumberAdd/NumberSubtract lower to the float64
  // machine operators for every input type.
  for (size_t i = 0; i < arraysize(test_types); i++) {
    TestingGraph t(test_types[i], test_types[i]);

    t.CheckLoweringBinop(IrOpcode::kFloat64Add, t.simplified()->NumberAdd());
    t.CheckLoweringBinop(IrOpcode::kFloat64Sub,
                         t.simplified()->NumberSubtract());
  }
}
1054
1055
TEST(LowerNumberDivMod_to_float64) {
  // NumberDivide lowers to Float64Div for all input types. NumberModulus is
  // only expected to become Float64Mod when the inputs are not known to be
  // Unsigned32 (that case takes a different lowering path).
  for (size_t i = 0; i < arraysize(test_types); i++) {
    TestingGraph t(test_types[i], test_types[i]);

    t.CheckLoweringBinop(IrOpcode::kFloat64Div, t.simplified()->NumberDivide());
    if (!test_types[i]->Is(Type::Unsigned32())) {
      t.CheckLoweringBinop(IrOpcode::kFloat64Mod,
                           t.simplified()->NumberModulus());
    }
  }
}
1067
1068
1069static void CheckChangeOf(IrOpcode::Value change, Node* of, Node* node) {
1070 CHECK_EQ(change, node->opcode());
1071 CHECK_EQ(of, node->InputAt(0));
1072}
1073
1074
1075TEST(LowerNumberToInt32_to_nop) {
1076 // NumberToInt32(x: kRepTagged | kTypeInt32) used as kRepTagged
1077 TestingGraph t(Type::Signed32());
1078 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
1079 Node* use = t.Use(trunc, kRepTagged);
1080 t.Return(use);
1081 t.Lower();
1082 CHECK_EQ(t.p0, use->InputAt(0));
1083}
1084
1085
1086TEST(LowerNumberToInt32_to_ChangeTaggedToFloat64) {
1087 // NumberToInt32(x: kRepTagged | kTypeInt32) used as kRepFloat64
1088 TestingGraph t(Type::Signed32());
1089 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
1090 Node* use = t.Use(trunc, kRepFloat64);
1091 t.Return(use);
1092 t.Lower();
1093 CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p0, use->InputAt(0));
1094}
1095
1096
TEST(LowerNumberToInt32_to_ChangeTaggedToInt32) {
  // NumberToInt32(x: kRepTagged | kTypeInt32) used as kTypeInt32 (the
  // lowering picks a word32 representation for the use, per the check
  // below): only a ChangeTaggedToInt32 is inserted.
  TestingGraph t(Type::Signed32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
  Node* use = t.Use(trunc, kTypeInt32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToInt32, t.p0, use->InputAt(0));
}
1106
1107
TEST(LowerNumberToInt32_to_TruncateFloat64ToInt32) {
  // NumberToInt32(x: kRepFloat64) used as kMachInt32: since the input is
  // already float64, a single TruncateFloat64ToInt32 suffices.
  TestingGraph t(Type::Number());
  Node* p0 = t.ExampleWithTypeAndRep(Type::Number(), kMachFloat64);
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), p0);
  Node* use = t.Use(trunc, kMachInt32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kTruncateFloat64ToInt32, p0, use->InputAt(0));
}
1118
1119
TEST(LowerNumberToInt32_to_TruncateFloat64ToInt32_with_change) {
  // NumberToInt32(x: kTypeNumber | kRepTagged) used as kMachInt32: a tagged
  // Number input must first be untagged (ChangeTaggedToFloat64) and then
  // truncated (TruncateFloat64ToInt32).
  TestingGraph t(Type::Number());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), t.p0);
  Node* use = t.Use(trunc, kMachInt32);
  t.Return(use);
  t.Lower();
  Node* node = use->InputAt(0);
  CHECK_EQ(IrOpcode::kTruncateFloat64ToInt32, node->opcode());
  Node* of = node->InputAt(0);
  CHECK_EQ(IrOpcode::kChangeTaggedToFloat64, of->opcode());
  CHECK_EQ(t.p0, of->InputAt(0));
}
1133
1134
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001135TEST(LowerNumberToUint32_to_nop) {
1136 // NumberToUint32(x: kRepTagged | kTypeUint32) used as kRepTagged
1137 TestingGraph t(Type::Unsigned32());
1138 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
1139 Node* use = t.Use(trunc, kRepTagged);
1140 t.Return(use);
1141 t.Lower();
1142 CHECK_EQ(t.p0, use->InputAt(0));
1143}
1144
1145
TEST(LowerNumberToUint32_to_ChangeTaggedToFloat64) {
  // NumberToUint32(x: kRepTagged | kTypeUint32) used as kRepFloat64
  // (note: the use below requests kRepFloat64, not kRepWord32): the
  // truncation vanishes and only a ChangeTaggedToFloat64 is inserted.
  TestingGraph t(Type::Unsigned32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
  Node* use = t.Use(trunc, kRepFloat64);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p0, use->InputAt(0));
}
1155
1156
TEST(LowerNumberToUint32_to_ChangeTaggedToUint32) {
  // NumberToUint32(x: kRepTagged | kTypeUint32) used as kTypeUint32: the
  // truncation vanishes and only a ChangeTaggedToUint32 is inserted.
  TestingGraph t(Type::Unsigned32());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
  Node* use = t.Use(trunc, kTypeUint32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kChangeTaggedToUint32, t.p0, use->InputAt(0));
}
1166
1167
TEST(LowerNumberToUint32_to_TruncateFloat64ToInt32) {
  // NumberToUint32(x: kRepFloat64) used as kMachUint32: since the input is
  // already float64, a single TruncateFloat64ToInt32 suffices.
  TestingGraph t(Type::Number());
  Node* p0 = t.ExampleWithOutput(kMachFloat64);
  // TODO(titzer): run the typer here, or attach machine type to param.
  NodeProperties::SetBounds(p0, Bounds(Type::Number()));
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), p0);
  Node* use = t.Use(trunc, kMachUint32);
  t.Return(use);
  t.Lower();
  CheckChangeOf(IrOpcode::kTruncateFloat64ToInt32, p0, use->InputAt(0));
}
1180
1181
TEST(LowerNumberToUint32_to_TruncateFloat64ToInt32_with_change) {
  // NumberToUint32(x: kTypeNumber | kRepTagged) used as kMachUint32: a
  // tagged Number input must first be untagged (ChangeTaggedToFloat64) and
  // then truncated (TruncateFloat64ToInt32).
  TestingGraph t(Type::Number());
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), t.p0);
  Node* use = t.Use(trunc, kMachUint32);
  t.Return(use);
  t.Lower();
  Node* node = use->InputAt(0);
  CHECK_EQ(IrOpcode::kTruncateFloat64ToInt32, node->opcode());
  Node* of = node->InputAt(0);
  CHECK_EQ(IrOpcode::kChangeTaggedToFloat64, of->opcode());
  CHECK_EQ(t.p0, of->InputAt(0));
}
1195
1196
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001197TEST(LowerNumberToUint32_to_TruncateFloat64ToInt32_uint32) {
1198 // NumberToUint32(x: kRepFloat64) used as kRepWord32
1199 TestingGraph t(Type::Unsigned32());
1200 Node* input = t.ExampleWithTypeAndRep(Type::Number(), kMachFloat64);
1201 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), input);
1202 Node* use = t.Use(trunc, kRepWord32);
1203 t.Return(use);
1204 t.Lower();
1205 CheckChangeOf(IrOpcode::kTruncateFloat64ToInt32, input, use->InputAt(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001206}
1207
1208
TEST(LowerNumberToUI32_of_Float64_used_as_word32) {
  // NumberTo(Int,Uint)32(x: kRepFloat64 | kType(Int,Uint)32) used as
  // kType(Int,Uint)32 | kRepWord32: expect a single ChangeFloat64To(U)int32.
  Type* types[] = {Type::Signed32(), Type::Unsigned32()};
  // Use-side type constraints; kMachNone leaves the use untyped.
  MachineType mach[] = {kTypeInt32, kTypeUint32, kMachNone};

  for (int i = 0; i < 2; i++) {      // i selects signed vs. unsigned.
    for (int u = 0; u < 3; u++) {    // u selects the use's type constraint.
      TestingGraph t(types[i]);
      Node* input = t.ExampleWithTypeAndRep(
          types[i], static_cast<MachineType>(kRepFloat64 | mach[i]));
      const Operator* op = i == 0 ? t.simplified()->NumberToInt32()
                                  : t.simplified()->NumberToUint32();
      Node* trunc = t.graph()->NewNode(op, input);
      Node* use = t.Use(trunc, static_cast<MachineType>(kRepWord32 | mach[u]));
      t.Return(use);
      t.Lower();
      IrOpcode::Value opcode = i == 0 ? IrOpcode::kChangeFloat64ToInt32
                                      : IrOpcode::kChangeFloat64ToUint32;
      CheckChangeOf(opcode, input, use->InputAt(0));
    }
  }
}
1232
1233
TEST(LowerNumberToUI32_of_Float64_used_as_tagged) {
  // NumberTo(Int,Uint)32(x: kRepFloat64 | kType(Int,Uint)32) used as
  // kType(Int,Uint)32 | kRepTagged: expect a ChangeFloat64ToTagged feeding
  // the tagged use.
  Type* types[] = {Type::Signed32(), Type::Unsigned32(), Type::Any()};
  // Use-side type constraints; kMachNone leaves the use untyped.
  MachineType mach[] = {kTypeInt32, kTypeUint32, kMachNone};

  for (int i = 0; i < 2; i++) {      // i selects signed vs. unsigned.
    for (int u = 0; u < 3; u++) {    // u selects the store's field type.
      TestingGraph t(types[i]);
      Node* input = t.ExampleWithTypeAndRep(
          types[i], static_cast<MachineType>(kRepFloat64 | mach[i]));
      const Operator* op = i == 0 ? t.simplified()->NumberToInt32()
                                  : t.simplified()->NumberToUint32();
      Node* trunc = t.graph()->NewNode(op, input);
      // TODO(titzer): we use the store here to force the representation.
      FieldAccess access = {kTaggedBase, 0, Handle<Name>(), types[u],
                            static_cast<MachineType>(mach[u] | kRepTagged)};
      Node* store = t.graph()->NewNode(t.simplified()->StoreField(access), t.p0,
                                       trunc, t.start, t.start);
      t.Effect(store);
      t.Lower();
      CheckChangeOf(IrOpcode::kChangeFloat64ToTagged, input, store->InputAt(2));
    }
  }
}
1259
1260
1261TEST(LowerReferenceEqual_to_wordeq) {
1262 TestingGraph t(Type::Any(), Type::Any());
1263 IrOpcode::Value opcode =
1264 static_cast<IrOpcode::Value>(t.machine()->WordEqual()->opcode());
1265 t.CheckLoweringBinop(opcode, t.simplified()->ReferenceEqual(Type::Any()));
1266}
1267
1268
TEST(LowerStringOps_to_call_and_compare) {
  // String comparisons lower to a (runtime) call whose result is compared
  // against zero with the pointer-width compare ops; StringAdd lowers to a
  // plain call.
  if (Pipeline::SupportedTarget()) {
    // These tests need linkage for the calls.
    TestingGraph t(Type::String(), Type::String());
    IrOpcode::Value compare_eq =
        static_cast<IrOpcode::Value>(t.machine()->WordEqual()->opcode());
    IrOpcode::Value compare_lt =
        static_cast<IrOpcode::Value>(t.machine()->IntLessThan()->opcode());
    IrOpcode::Value compare_le = static_cast<IrOpcode::Value>(
        t.machine()->IntLessThanOrEqual()->opcode());
    t.CheckLoweringBinop(compare_eq, t.simplified()->StringEqual());
    t.CheckLoweringBinop(compare_lt, t.simplified()->StringLessThan());
    t.CheckLoweringBinop(compare_le, t.simplified()->StringLessThanOrEqual());
    t.CheckLoweringBinop(IrOpcode::kCall, t.simplified()->StringAdd());
  }
}
1285
1286
1287void CheckChangeInsertion(IrOpcode::Value expected, MachineType from,
1288 MachineType to) {
1289 TestingGraph t(Type::Any());
1290 Node* in = t.ExampleWithOutput(from);
1291 Node* use = t.Use(in, to);
1292 t.Return(use);
1293 t.Lower();
1294 CHECK_EQ(expected, use->InputAt(0)->opcode());
1295 CHECK_EQ(in, use->InputAt(0)->InputAt(0));
1296}
1297
1298
TEST(InsertBasicChanges) {
  // Exercises every basic representation change between float64, tagged,
  // int32 and uint32 via CheckChangeInsertion.
  CheckChangeInsertion(IrOpcode::kChangeFloat64ToInt32, kRepFloat64,
                       kTypeInt32);
  CheckChangeInsertion(IrOpcode::kChangeFloat64ToUint32, kRepFloat64,
                       kTypeUint32);
  CheckChangeInsertion(IrOpcode::kChangeTaggedToInt32, kRepTagged, kTypeInt32);
  CheckChangeInsertion(IrOpcode::kChangeTaggedToUint32, kRepTagged,
                       kTypeUint32);

  CheckChangeInsertion(IrOpcode::kChangeFloat64ToTagged, kRepFloat64,
                       kRepTagged);
  CheckChangeInsertion(IrOpcode::kChangeTaggedToFloat64, kRepTagged,
                       kRepFloat64);

  CheckChangeInsertion(IrOpcode::kChangeInt32ToFloat64, kTypeInt32,
                       kRepFloat64);
  CheckChangeInsertion(IrOpcode::kChangeInt32ToTagged, kTypeInt32, kRepTagged);

  CheckChangeInsertion(IrOpcode::kChangeUint32ToFloat64, kTypeUint32,
                       kRepFloat64);
  CheckChangeInsertion(IrOpcode::kChangeUint32ToTagged, kTypeUint32,
                       kRepTagged);
}
1322
1323
// Builds (op p0 p1), returns it, lowers the graph, and verifies that both
// inputs were wrapped in {input_change} of the original parameters and that
// the returned result was wrapped in {output_change} of the binop.
static void CheckChangesAroundBinop(TestingGraph* t, const Operator* op,
                                    IrOpcode::Value input_change,
                                    IrOpcode::Value output_change) {
  Node* binop = t->graph()->NewNode(op, t->p0, t->p1);
  t->Return(binop);
  t->Lower();
  CHECK_EQ(input_change, binop->InputAt(0)->opcode());
  CHECK_EQ(input_change, binop->InputAt(1)->opcode());
  CHECK_EQ(t->p0, binop->InputAt(0)->InputAt(0));
  CHECK_EQ(t->p1, binop->InputAt(1)->InputAt(0));
  CHECK_EQ(output_change, t->ret->InputAt(0)->opcode());
  CHECK_EQ(binop, t->ret->InputAt(0)->InputAt(0));
}
1337
1338
TEST(InsertChangesAroundInt32Binops) {
  // Int32 machine binops on tagged Signed32 parameters get
  // ChangeTaggedToInt32 on both inputs and ChangeInt32ToTagged on the result.
  TestingGraph t(Type::Signed32(), Type::Signed32());

  const Operator* ops[] = {t.machine()->Int32Add(), t.machine()->Int32Sub(),
                           t.machine()->Int32Mul(), t.machine()->Int32Div(),
                           t.machine()->Int32Mod(), t.machine()->Word32And(),
                           t.machine()->Word32Or(), t.machine()->Word32Xor(),
                           t.machine()->Word32Shl(), t.machine()->Word32Sar()};

  for (size_t i = 0; i < arraysize(ops); i++) {
    CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToInt32,
                            IrOpcode::kChangeInt32ToTagged);
  }
}
1353
1354
1355TEST(InsertChangesAroundInt32Cmp) {
1356 TestingGraph t(Type::Signed32(), Type::Signed32());
1357
1358 const Operator* ops[] = {t.machine()->Int32LessThan(),
1359 t.machine()->Int32LessThanOrEqual()};
1360
1361 for (size_t i = 0; i < arraysize(ops); i++) {
1362 CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToInt32,
1363 IrOpcode::kChangeBitToBool);
1364 }
1365}
1366
1367
1368TEST(InsertChangesAroundUint32Cmp) {
1369 TestingGraph t(Type::Unsigned32(), Type::Unsigned32());
1370
1371 const Operator* ops[] = {t.machine()->Uint32LessThan(),
1372 t.machine()->Uint32LessThanOrEqual()};
1373
1374 for (size_t i = 0; i < arraysize(ops); i++) {
1375 CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToUint32,
1376 IrOpcode::kChangeBitToBool);
1377 }
1378}
1379
1380
TEST(InsertChangesAroundFloat64Binops) {
  // Float64 machine binops on tagged Number parameters get
  // ChangeTaggedToFloat64 on both inputs and ChangeFloat64ToTagged on the
  // result.
  TestingGraph t(Type::Number(), Type::Number());

  const Operator* ops[] = {
      t.machine()->Float64Add(), t.machine()->Float64Sub(),
      t.machine()->Float64Mul(), t.machine()->Float64Div(),
      t.machine()->Float64Mod(),
  };

  for (size_t i = 0; i < arraysize(ops); i++) {
    CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToFloat64,
                            IrOpcode::kChangeFloat64ToTagged);
  }
}
1395
1396
TEST(InsertChangesAroundFloat64Cmp) {
  // Float64 comparisons on tagged Number parameters: ChangeTaggedToFloat64
  // on both operands, ChangeBitToBool on the bit-valued result.
  TestingGraph t(Type::Number(), Type::Number());

  const Operator* ops[] = {t.machine()->Float64Equal(),
                           t.machine()->Float64LessThan(),
                           t.machine()->Float64LessThanOrEqual()};

  for (size_t i = 0; i < arraysize(ops); i++) {
    CheckChangesAroundBinop(&t, ops[i], IrOpcode::kChangeTaggedToFloat64,
                            IrOpcode::kChangeBitToBool);
  }
}
1409
1410
namespace {

// Verifies that a lowered field access computes its address as
// base + (offset - tag), i.e. that the second input of the Load/Store is the
// expected untagged constant offset.
void CheckFieldAccessArithmetic(FieldAccess access, Node* load_or_store) {
  IntPtrMatcher mindex(load_or_store->InputAt(1));
  CHECK(mindex.Is(access.offset - access.tag()));
}


// Verifies the index arithmetic of a lowered element access:
// Int32Add(Word32Shl(index, #element_size_log2), #(header_size - tag)),
// with the shift omitted for byte-sized elements. On 64-bit targets the
// whole expression is additionally wrapped in a ChangeUint32ToUint64.
// Returns the node that supplies the raw element index.
Node* CheckElementAccessArithmetic(ElementAccess access, Node* load_or_store) {
  Node* index = load_or_store->InputAt(1);
  if (kPointerSize == 8) {
    CHECK_EQ(IrOpcode::kChangeUint32ToUint64, index->opcode());
    index = index->InputAt(0);
  }

  Int32BinopMatcher mindex(index);
  CHECK_EQ(IrOpcode::kInt32Add, mindex.node()->opcode());
  CHECK(mindex.right().Is(access.header_size - access.tag()));

  const int element_size_shift = ElementSizeLog2Of(access.machine_type);
  if (element_size_shift) {
    Int32BinopMatcher shl(mindex.left().node());
    CHECK_EQ(IrOpcode::kWord32Shl, shl.node()->opcode());
    CHECK(shl.right().Is(element_size_shift));
    return shl.left().node();
  } else {
    // Byte-sized elements need no scaling.
    return mindex.left().node();
  }
}


// Machine representations exercised by the load/store lowering tests below.
const MachineType kMachineReps[] = {kRepBit, kMachInt8, kMachInt16,
                                    kMachInt32, kMachInt64, kMachFloat64,
                                    kMachAnyTagged};

}  // namespace
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001447
1448
TEST(LowerLoadField_to_load) {
  // LoadField lowers to a machine Load with the field's machine type and a
  // constant (untagged) offset operand.
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(kMachineReps); i++) {
    FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                          Handle<Name>::null(), Type::Any(), kMachineReps[i]};

    Node* load =
        t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
    Node* use = t.Use(load, kMachineReps[i]);
    t.Return(use);
    t.Lower();
    CHECK_EQ(IrOpcode::kLoad, load->opcode());
    CHECK_EQ(t.p0, load->InputAt(0));
    CheckFieldAccessArithmetic(access, load);

    MachineType rep = OpParameter<MachineType>(load);
    CHECK_EQ(kMachineReps[i], rep);
  }
}
1469
1470
TEST(LowerStoreField_to_store) {
  // StoreField lowers to a machine Store with the field's machine type and a
  // constant (untagged) offset operand; tagged stores additionally carry the
  // full write barrier.
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(kMachineReps); i++) {
    FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                          Handle<Name>::null(), Type::Any(), kMachineReps[i]};


    Node* val = t.ExampleWithOutput(kMachineReps[i]);
    Node* store = t.graph()->NewNode(t.simplified()->StoreField(access), t.p0,
                                     val, t.start, t.start);
    t.Effect(store);
    t.Lower();
    CHECK_EQ(IrOpcode::kStore, store->opcode());
    CHECK_EQ(val, store->InputAt(2));
    CheckFieldAccessArithmetic(access, store);

    StoreRepresentation rep = OpParameter<StoreRepresentation>(store);
    if (kMachineReps[i] & kRepTagged) {
      // Only stores of tagged values require a write barrier.
      CHECK_EQ(kFullWriteBarrier, rep.write_barrier_kind());
    }
    CHECK_EQ(kMachineReps[i], rep.machine_type());
  }
}
1495
1496
TEST(LowerLoadElement_to_load) {
  // LoadElement lowers to a machine Load whose index operand encodes the
  // scaled element index plus the (untagged) header offset.
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(kMachineReps); i++) {
    ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                            Type::Any(), kMachineReps[i]};

    Node* load = t.graph()->NewNode(t.simplified()->LoadElement(access), t.p0,
                                    t.p1, t.start, t.start);
    Node* use = t.Use(load, kMachineReps[i]);
    t.Return(use);
    t.Lower();
    CHECK_EQ(IrOpcode::kLoad, load->opcode());
    CHECK_EQ(t.p0, load->InputAt(0));
    CheckElementAccessArithmetic(access, load);

    MachineType rep = OpParameter<MachineType>(load);
    CHECK_EQ(kMachineReps[i], rep);
  }
}
1517
1518
TEST(LowerStoreElement_to_store) {
  // StoreElement lowers to a machine Store with scaled-index arithmetic;
  // tagged stores additionally carry the full write barrier.
  TestingGraph t(Type::Any(), Type::Signed32());

  for (size_t i = 0; i < arraysize(kMachineReps); i++) {
    ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                            Type::Any(), kMachineReps[i]};

    Node* val = t.ExampleWithOutput(kMachineReps[i]);
    Node* store = t.graph()->NewNode(t.simplified()->StoreElement(access), t.p0,
                                     t.p1, val, t.start, t.start);
    t.Effect(store);
    t.Lower();
    CHECK_EQ(IrOpcode::kStore, store->opcode());
    CHECK_EQ(val, store->InputAt(2));
    CheckElementAccessArithmetic(access, store);

    StoreRepresentation rep = OpParameter<StoreRepresentation>(store);
    if (kMachineReps[i] & kRepTagged) {
      // Only stores of tagged values require a write barrier.
      CHECK_EQ(kFullWriteBarrier, rep.write_barrier_kind());
    }
    CHECK_EQ(kMachineReps[i], rep.machine_type());
  }
}
1542
1543
TEST(InsertChangeForLoadElementIndex) {
  // LoadElement(obj: Tagged, index: kTypeInt32 | kRepTagged, length) =>
  // Load(obj, Int32Add(Int32Mul(ChangeTaggedToInt32(index), #k), #k))
  // i.e. a tagged index must be untagged before the index arithmetic.
  TestingGraph t(Type::Any(), Type::Signed32());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachAnyTagged};

  Node* load = t.graph()->NewNode(t.simplified()->LoadElement(access), t.p0,
                                  t.p1, t.start, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));

  Node* index = CheckElementAccessArithmetic(access, load);
  CheckChangeOf(IrOpcode::kChangeTaggedToInt32, t.p1, index);
}
1561
1562
TEST(InsertChangeForStoreElementIndex) {
  // StoreElement(obj: Tagged, index: kTypeInt32 | kRepTagged, length, val) =>
  // Store(obj, Int32Add(Int32Mul(ChangeTaggedToInt32(index), #k), #k), val)
  // i.e. a tagged index must be untagged before the index arithmetic.
  TestingGraph t(Type::Any(), Type::Signed32());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachAnyTagged};

  Node* store =
      t.graph()->NewNode(t.simplified()->StoreElement(access), t.p0, t.p1,
                         t.jsgraph.TrueConstant(), t.start, t.start);
  t.Effect(store);
  t.Lower();
  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));

  Node* index = CheckElementAccessArithmetic(access, store);
  CheckChangeOf(IrOpcode::kChangeTaggedToInt32, t.p1, index);
}
1581
1582
TEST(InsertChangeForLoadElement) {
  // A float64 element loaded into a tagged context requires a
  // ChangeFloat64ToTagged on the loaded value.
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32(), Type::Any());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachFloat64};

  Node* load = t.graph()->NewNode(t.simplified()->LoadElement(access), t.p0,
                                  t.p1, t.start, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeFloat64ToTagged, load, t.ret->InputAt(0));
}
1597
1598
TEST(InsertChangeForLoadField) {
  // A float64 field loaded into a tagged context requires a
  // ChangeFloat64ToTagged on the loaded value.
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32());
  FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                        Handle<Name>::null(), Type::Any(), kMachFloat64};

  Node* load =
      t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
  t.Return(load);
  t.Lower();
  CHECK_EQ(IrOpcode::kLoad, load->opcode());
  CHECK_EQ(t.p0, load->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeFloat64ToTagged, load, t.ret->InputAt(0));
}
1613
1614
TEST(InsertChangeForStoreElement) {
  // Storing a tagged value into a float64 element requires a
  // ChangeTaggedToFloat64 on the stored value.
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32());
  ElementAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize, Type::Any(),
                          kMachFloat64};

  Node* store =
      t.graph()->NewNode(t.simplified()->StoreElement(access), t.p0,
                         t.jsgraph.Int32Constant(0), t.p1, t.start, t.start);
  t.Effect(store);
  t.Lower();

  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p1, store->InputAt(2));
}
1631
1632
TEST(InsertChangeForStoreField) {
  // Storing a tagged value into a float64 field requires a
  // ChangeTaggedToFloat64 on the stored value.
  // TODO(titzer): test all load/store representation change insertions.
  TestingGraph t(Type::Any(), Type::Signed32());
  FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                        Handle<Name>::null(), Type::Any(), kMachFloat64};

  Node* store = t.graph()->NewNode(t.simplified()->StoreField(access), t.p0,
                                   t.p1, t.start, t.start);
  t.Effect(store);
  t.Lower();

  CHECK_EQ(IrOpcode::kStore, store->opcode());
  CHECK_EQ(t.p0, store->InputAt(0));
  CheckChangeOf(IrOpcode::kChangeTaggedToFloat64, t.p1, store->InputAt(2));
}
1648
1649
TEST(UpdatePhi) {
  // A Phi of two loads, consumed in a specific machine representation,
  // should have its own representation updated to match that use rather than
  // staying kMachAnyTagged.
  TestingGraph t(Type::Any(), Type::Signed32());
  static const MachineType kMachineTypes[] = {kMachInt32, kMachUint32,
                                              kMachFloat64};
  Type* kTypes[] = {Type::Signed32(), Type::Unsigned32(), Type::Number()};

  for (size_t i = 0; i < arraysize(kMachineTypes); i++) {
    FieldAccess access = {kTaggedBase, FixedArrayBase::kHeaderSize,
                          Handle<Name>::null(), kTypes[i], kMachineTypes[i]};

    Node* load0 =
        t.graph()->NewNode(t.simplified()->LoadField(access), t.p0, t.start);
    Node* load1 =
        t.graph()->NewNode(t.simplified()->LoadField(access), t.p1, t.start);
    Node* phi = t.graph()->NewNode(t.common()->Phi(kMachAnyTagged, 2), load0,
                                   load1, t.start);
    t.Return(t.Use(phi, kMachineTypes[i]));
    t.Lower();

    CHECK_EQ(IrOpcode::kPhi, phi->opcode());
    // Only the representation bits are required to match the use.
    CHECK_EQ(RepresentationOf(kMachineTypes[i]),
             RepresentationOf(OpParameter<MachineType>(phi)));
  }
}
1674
1675
TEST(RunNumberDivide_minus_1_TruncatingToInt32) {
  // End-to-end: NumberDivide by -1 with a truncating use should behave like
  // int32 negation for all int32 inputs.
  SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
  Node* num = t.NumberToInt32(t.Parameter(0));
  Node* div = t.NumberDivide(num, t.jsgraph.Constant(-1));
  Node* trunc = t.NumberToInt32(div);
  t.Return(trunc);

  if (Pipeline::SupportedTarget()) {
    t.LowerAllNodesAndLowerChanges();
    t.GenerateCode();

    FOR_INT32_INPUTS(i) {
      // Expected value: 32-bit negation (0 - *i), matching the truncation
      // semantics of the generated code.
      int32_t x = 0 - *i;
      t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
    }
  }
}
1693
1694
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001695TEST(NumberMultiply_TruncatingToInt32) {
1696 int32_t constants[] = {-100, -10, -1, 0, 1, 100, 1000};
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001697
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001698 for (size_t i = 0; i < arraysize(constants); i++) {
1699 TestingGraph t(Type::Signed32());
1700 Node* k = t.jsgraph.Constant(constants[i]);
1701 Node* mul = t.graph()->NewNode(t.simplified()->NumberMultiply(), t.p0, k);
1702 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), mul);
1703 t.Return(trunc);
1704 t.Lower();
1705
1706 CHECK_EQ(IrOpcode::kInt32Mul, mul->opcode());
1707 }
1708}
1709
1710
1711TEST(RunNumberMultiply_TruncatingToInt32) {
1712 int32_t constants[] = {-100, -10, -1, 0, 1, 100, 1000, 3000999};
1713
1714 for (size_t i = 0; i < arraysize(constants); i++) {
1715 double k = static_cast<double>(constants[i]);
1716 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1717 Node* num = t.NumberToInt32(t.Parameter(0));
1718 Node* mul = t.NumberMultiply(num, t.jsgraph.Constant(k));
1719 Node* trunc = t.NumberToInt32(mul);
1720 t.Return(trunc);
1721
1722 if (Pipeline::SupportedTarget()) {
1723 t.LowerAllNodesAndLowerChanges();
1724 t.GenerateCode();
1725
1726 FOR_INT32_INPUTS(i) {
1727 int32_t x = DoubleToInt32(static_cast<double>(*i) * k);
1728 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1729 }
1730 }
1731 }
1732}
1733
1734
1735TEST(RunNumberMultiply_TruncatingToUint32) {
1736 uint32_t constants[] = {0, 1, 2, 3, 4, 100, 1000, 1024, 2048, 3000999};
1737
1738 for (size_t i = 0; i < arraysize(constants); i++) {
1739 double k = static_cast<double>(constants[i]);
1740 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1741 Node* num = t.NumberToUint32(t.Parameter(0));
1742 Node* mul = t.NumberMultiply(num, t.jsgraph.Constant(k));
1743 Node* trunc = t.NumberToUint32(mul);
1744 t.Return(trunc);
1745
1746 if (Pipeline::SupportedTarget()) {
1747 t.LowerAllNodesAndLowerChanges();
1748 t.GenerateCode();
1749
1750 FOR_UINT32_INPUTS(i) {
1751 uint32_t x = DoubleToUint32(static_cast<double>(*i) * k);
1752 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1753 }
1754 }
1755 }
1756}
1757
1758
1759TEST(RunNumberDivide_2_TruncatingToUint32) {
1760 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1761 Node* num = t.NumberToUint32(t.Parameter(0));
1762 Node* div = t.NumberDivide(num, t.jsgraph.Constant(2));
1763 Node* trunc = t.NumberToUint32(div);
1764 t.Return(trunc);
1765
1766 if (Pipeline::SupportedTarget()) {
1767 t.LowerAllNodesAndLowerChanges();
1768 t.GenerateCode();
1769
1770 FOR_UINT32_INPUTS(i) {
1771 uint32_t x = DoubleToUint32(static_cast<double>(*i / 2.0));
1772 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1773 }
1774 }
1775}
1776
1777
TEST(NumberMultiply_ConstantOutOfRange) {
  // With a large constant (1000000023) the multiply stays a Float64Mul
  // even though the result is truncated afterwards — presumably the int32
  // product is not provably in range; confirm against SimplifiedLowering.
  TestingGraph t(Type::Signed32());
  Node* k = t.jsgraph.Constant(1000000023);
  Node* mul = t.graph()->NewNode(t.simplified()->NumberMultiply(), t.p0, k);
  Node* trunc = t.graph()->NewNode(t.simplified()->NumberToInt32(), mul);
  t.Return(trunc);
  t.Lower();

  CHECK_EQ(IrOpcode::kFloat64Mul, mul->opcode());
}
1788
1789
TEST(NumberMultiply_NonTruncating) {
  // Without a truncating use (no NumberToInt32 on the result), the
  // multiply must remain a float64 operation.
  TestingGraph t(Type::Signed32());
  Node* k = t.jsgraph.Constant(111);
  Node* mul = t.graph()->NewNode(t.simplified()->NumberMultiply(), t.p0, k);
  t.Return(mul);
  t.Lower();

  CHECK_EQ(IrOpcode::kFloat64Mul, mul->opcode());
}
1799
1800
1801TEST(NumberDivide_TruncatingToInt32) {
1802 int32_t constants[] = {-100, -10, 1, 4, 100, 1000};
1803
1804 for (size_t i = 0; i < arraysize(constants); i++) {
1805 TestingGraph t(Type::Signed32());
1806 Node* k = t.jsgraph.Constant(constants[i]);
1807 Node* div = t.graph()->NewNode(t.simplified()->NumberDivide(), t.p0, k);
1808 Node* use = t.Use(div, kMachInt32);
1809 t.Return(use);
1810 t.Lower();
1811
1812 CHECK_EQ(IrOpcode::kInt32Div, use->InputAt(0)->opcode());
1813 }
1814}
1815
1816
1817TEST(RunNumberDivide_TruncatingToInt32) {
1818 int32_t constants[] = {-100, -10, -1, 1, 2, 100, 1000, 1024, 2048};
1819
1820 for (size_t i = 0; i < arraysize(constants); i++) {
1821 int32_t k = constants[i];
1822 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1823 Node* num = t.NumberToInt32(t.Parameter(0));
1824 Node* div = t.NumberDivide(num, t.jsgraph.Constant(k));
1825 Node* trunc = t.NumberToInt32(div);
1826 t.Return(trunc);
1827
1828 if (Pipeline::SupportedTarget()) {
1829 t.LowerAllNodesAndLowerChanges();
1830 t.GenerateCode();
1831
1832 FOR_INT32_INPUTS(i) {
1833 if (*i == INT_MAX) continue; // exclude max int.
1834 int32_t x = DoubleToInt32(static_cast<double>(*i) / k);
1835 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1836 }
1837 }
1838 }
1839}
1840
1841
1842TEST(NumberDivide_TruncatingToUint32) {
1843 double constants[] = {1, 3, 100, 1000, 100998348};
1844
1845 for (size_t i = 0; i < arraysize(constants); i++) {
1846 TestingGraph t(Type::Unsigned32());
1847 Node* k = t.jsgraph.Constant(constants[i]);
1848 Node* div = t.graph()->NewNode(t.simplified()->NumberDivide(), t.p0, k);
1849 Node* use = t.Use(div, kMachUint32);
1850 t.Return(use);
1851 t.Lower();
1852
1853 CHECK_EQ(IrOpcode::kUint32Div, use->InputAt(0)->opcode());
1854 }
1855}
1856
1857
1858TEST(RunNumberDivide_TruncatingToUint32) {
1859 uint32_t constants[] = {100, 10, 1, 1, 2, 4, 1000, 1024, 2048};
1860
1861 for (size_t i = 0; i < arraysize(constants); i++) {
1862 uint32_t k = constants[i];
1863 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1864 Node* num = t.NumberToUint32(t.Parameter(0));
1865 Node* div = t.NumberDivide(num, t.jsgraph.Constant(static_cast<double>(k)));
1866 Node* trunc = t.NumberToUint32(div);
1867 t.Return(trunc);
1868
1869 if (Pipeline::SupportedTarget()) {
1870 t.LowerAllNodesAndLowerChanges();
1871 t.GenerateCode();
1872
1873 FOR_UINT32_INPUTS(i) {
1874 uint32_t x = *i / k;
1875 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1876 }
1877 }
1878 }
1879}
1880
1881
TEST(NumberDivide_BadConstants) {
  // Divisor constants for which lowering cannot emit a plain machine
  // division instruction.
  {
    // Signed division by -1 lowers to an Int32Sub (negation), not an
    // Int32Div.
    TestingGraph t(Type::Signed32());
    Node* k = t.jsgraph.Constant(-1);
    Node* div = t.graph()->NewNode(t.simplified()->NumberDivide(), t.p0, k);
    Node* use = t.Use(div, kMachInt32);
    t.Return(use);
    t.Lower();

    CHECK_EQ(IrOpcode::kInt32Sub, use->InputAt(0)->opcode());
  }

  {
    // Signed division by 0 folds to the int32 constant 0 (in JS, x / 0 is
    // +/-Infinity or NaN, and ToInt32 of those is 0).
    TestingGraph t(Type::Signed32());
    Node* k = t.jsgraph.Constant(0);
    Node* div = t.graph()->NewNode(t.simplified()->NumberDivide(), t.p0, k);
    Node* use = t.Use(div, kMachInt32);
    t.Return(use);
    t.Lower();

    CHECK_EQ(IrOpcode::kInt32Constant, use->InputAt(0)->opcode());
    CHECK_EQ(0, OpParameter<int32_t>(use->InputAt(0)));
  }

  {
    // Unsigned division by 0 also folds to the constant 0.
    TestingGraph t(Type::Unsigned32());
    Node* k = t.jsgraph.Constant(0);
    Node* div = t.graph()->NewNode(t.simplified()->NumberDivide(), t.p0, k);
    Node* use = t.Use(div, kMachUint32);
    t.Return(use);
    t.Lower();

    CHECK_EQ(IrOpcode::kInt32Constant, use->InputAt(0)->opcode());
    CHECK_EQ(0, OpParameter<int32_t>(use->InputAt(0)));
  }
}
1918
1919
1920TEST(NumberModulus_TruncatingToInt32) {
1921 int32_t constants[] = {-100, -10, 1, 4, 100, 1000};
1922
1923 for (size_t i = 0; i < arraysize(constants); i++) {
1924 TestingGraph t(Type::Signed32());
1925 Node* k = t.jsgraph.Constant(constants[i]);
1926 Node* mod = t.graph()->NewNode(t.simplified()->NumberModulus(), t.p0, k);
1927 Node* use = t.Use(mod, kMachInt32);
1928 t.Return(use);
1929 t.Lower();
1930
1931 CHECK_EQ(IrOpcode::kInt32Mod, use->InputAt(0)->opcode());
1932 }
1933}
1934
1935
1936TEST(RunNumberModulus_TruncatingToInt32) {
1937 int32_t constants[] = {-100, -10, -1, 1, 2, 100, 1000, 1024, 2048};
1938
1939 for (size_t i = 0; i < arraysize(constants); i++) {
1940 int32_t k = constants[i];
1941 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1942 Node* num = t.NumberToInt32(t.Parameter(0));
1943 Node* mod = t.NumberModulus(num, t.jsgraph.Constant(k));
1944 Node* trunc = t.NumberToInt32(mod);
1945 t.Return(trunc);
1946
1947 if (Pipeline::SupportedTarget()) {
1948 t.LowerAllNodesAndLowerChanges();
1949 t.GenerateCode();
1950
1951 FOR_INT32_INPUTS(i) {
1952 if (*i == INT_MAX) continue; // exclude max int.
1953 int32_t x = DoubleToInt32(std::fmod(static_cast<double>(*i), k));
1954 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1955 }
1956 }
1957 }
1958}
1959
1960
1961TEST(NumberModulus_TruncatingToUint32) {
1962 double constants[] = {1, 3, 100, 1000, 100998348};
1963
1964 for (size_t i = 0; i < arraysize(constants); i++) {
1965 TestingGraph t(Type::Unsigned32());
1966 Node* k = t.jsgraph.Constant(constants[i]);
1967 Node* mod = t.graph()->NewNode(t.simplified()->NumberModulus(), t.p0, k);
1968 Node* trunc = t.graph()->NewNode(t.simplified()->NumberToUint32(), mod);
1969 Node* ret = t.Return(trunc);
1970 t.Lower();
1971
1972 CHECK_EQ(IrOpcode::kUint32Mod, ret->InputAt(0)->opcode());
1973 }
1974}
1975
1976
1977TEST(RunNumberModulus_TruncatingToUint32) {
1978 uint32_t constants[] = {1, 2, 100, 1000, 1024, 2048};
1979
1980 for (size_t i = 0; i < arraysize(constants); i++) {
1981 uint32_t k = constants[i];
1982 SimplifiedLoweringTester<Object*> t(kMachAnyTagged);
1983 Node* num = t.NumberToUint32(t.Parameter(0));
1984 Node* mod =
1985 t.NumberModulus(num, t.jsgraph.Constant(static_cast<double>(k)));
1986 Node* trunc = t.NumberToUint32(mod);
1987 t.Return(trunc);
1988
1989 if (Pipeline::SupportedTarget()) {
1990 t.LowerAllNodesAndLowerChanges();
1991 t.GenerateCode();
1992
1993 FOR_UINT32_INPUTS(i) {
1994 uint32_t x = *i % k;
1995 t.CheckNumberCall(static_cast<double>(x), static_cast<double>(*i));
1996 }
1997 }
1998 }
1999}
2000
2001
2002TEST(NumberModulus_Int32) {
2003 int32_t constants[] = {-100, -10, 1, 4, 100, 1000};
2004
2005 for (size_t i = 0; i < arraysize(constants); i++) {
2006 TestingGraph t(Type::Signed32());
2007 Node* k = t.jsgraph.Constant(constants[i]);
2008 Node* mod = t.graph()->NewNode(t.simplified()->NumberModulus(), t.p0, k);
2009 t.Return(mod);
2010 t.Lower();
2011
2012 CHECK_EQ(IrOpcode::kFloat64Mod, mod->opcode()); // Pesky -0 behavior.
2013 }
2014}
2015
2016
TEST(NumberModulus_Uint32) {
  // An Unsigned32 modulus by these constants lowers to Uint32Mod whether
  // the use demands int32 or uint32 representation.
  const double kConstants[] = {2, 100, 1000, 1024, 2048};
  const MachineType kTypes[] = {kMachInt32, kMachUint32};

  for (auto const type : kTypes) {
    for (auto const c : kConstants) {
      TestingGraph t(Type::Unsigned32());
      Node* k = t.jsgraph.Constant(c);
      Node* mod = t.graph()->NewNode(t.simplified()->NumberModulus(), t.p0, k);
      Node* use = t.Use(mod, type);
      t.Return(use);
      t.Lower();

      CHECK_EQ(IrOpcode::kUint32Mod, use->InputAt(0)->opcode());
    }
  }
}
2034
2035
TEST(PhiRepresentation) {
  // Checks which machine type gets selected for a phi whose two inputs
  // have the given static types and whose single use demands the given
  // machine type.
  HandleAndZoneScope scope;
  Zone* z = scope.main_zone();

  struct TestData {
    Type* arg1;                 // static type of the first phi input.
    Type* arg2;                 // static type of the second phi input.
    MachineType use;            // machine type demanded by the phi's use.
    MachineTypeUnion expected;  // machine type expected on the lowered phi.
  };

  TestData test_data[] = {
      {Type::Signed32(), Type::Unsigned32(), kMachInt32,
       kRepWord32 | kTypeNumber},
      {Type::Signed32(), Type::Unsigned32(), kMachUint32,
       kRepWord32 | kTypeNumber},
      {Type::Signed32(), Type::Signed32(), kMachInt32, kMachInt32},
      {Type::Unsigned32(), Type::Unsigned32(), kMachInt32, kMachUint32},
      {Type::Number(), Type::Signed32(), kMachInt32, kMachFloat64},
      {Type::Signed32(), Type::String(), kMachInt32, kMachAnyTagged}};

  for (auto const d : test_data) {
    TestingGraph t(d.arg1, d.arg2, Type::Boolean());

    // Build a branch/merge diamond so the phi has a proper control input.
    Node* br = t.graph()->NewNode(t.common()->Branch(), t.p2, t.start);
    Node* tb = t.graph()->NewNode(t.common()->IfTrue(), br);
    Node* fb = t.graph()->NewNode(t.common()->IfFalse(), br);
    Node* m = t.graph()->NewNode(t.common()->Merge(2), tb, fb);

    Node* phi =
        t.graph()->NewNode(t.common()->Phi(kMachAnyTagged, 2), t.p0, t.p1, m);

    // The phi's bounds are the union of its two input types.
    Bounds phi_bounds = Bounds::Either(Bounds(d.arg1), Bounds(d.arg2), z);
    NodeProperties::SetBounds(phi, phi_bounds);

    Node* use = t.Use(phi, d.use);
    t.Return(use);
    t.Lower();

    CHECK_EQ(d.expected, OpParameter<MachineType>(phi));
  }
}