blob: ef9e89ed4efa71b7bbaa51aec1c649b8980d198c [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/base/bits.h"
6#include "src/compiler/instruction-selector-impl.h"
7#include "src/compiler/node-matchers.h"
8
9namespace v8 {
10namespace internal {
11namespace compiler {
12
// Adds Arm-specific methods for generating InstructionOperands.
class ArmOperandGenerator : public OperandGenerator {
 public:
  explicit ArmOperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Returns true if |value| is encodable as an ARM addressing-mode-1
  // (data-processing) immediate: an 8-bit value rotated right by an even
  // amount.  Delegates to the assembler, which knows the exact encoding.
  bool CanBeImmediate(int32_t value) const {
    return Assembler::ImmediateFitsAddrMode1Instruction(value);
  }

  // Unsigned overload; the encoding check only looks at the bit pattern.
  bool CanBeImmediate(uint32_t value) const {
    return CanBeImmediate(bit_cast<int32_t>(value));
  }

  // Returns true if |node| is a 32-bit constant usable as the immediate
  // operand of |opcode|.  The admissible range depends on the instruction
  // class; for some data-processing ops the inverted or negated value also
  // qualifies because the selector can emit the complementary instruction.
  bool CanBeImmediate(Node* node, InstructionCode opcode) {
    Int32Matcher m(node);
    if (!m.HasValue()) return false;
    int32_t value = m.Value();
    switch (ArchOpcodeField::decode(opcode)) {
      // These have a bitwise-complement twin (AND<->BIC, MOV<->MVN), so the
      // inverted immediate is equally usable.
      case kArmAnd:
      case kArmMov:
      case kArmMvn:
      case kArmBic:
        return CanBeImmediate(value) || CanBeImmediate(~value);

      // These have an arithmetically-negated twin (ADD<->SUB, CMP<->CMN).
      case kArmAdd:
      case kArmSub:
      case kArmCmp:
      case kArmCmn:
        return CanBeImmediate(value) || CanBeImmediate(-value);

      // No complementary form; the immediate must encode as-is.
      case kArmTst:
      case kArmTeq:
      case kArmOrr:
      case kArmEor:
      case kArmRsb:
        return CanBeImmediate(value);

      // VFP loads/stores take an 8-bit, word-scaled offset: +/-1020, and a
      // multiple of 4.
      case kArmVldrF32:
      case kArmVstrF32:
      case kArmVldrF64:
      case kArmVstrF64:
        return value >= -1020 && value <= 1020 && (value % 4) == 0;

      // Addressing-mode-2 word/byte accesses take a 12-bit offset: +/-4095.
      // NOTE(review): architecturally LDRSB is an addressing-mode-3 access
      // limited to +/-255; presumably the assembler legalizes larger
      // offsets itself -- confirm before relying on the full range here.
      case kArmLdrb:
      case kArmLdrsb:
      case kArmStrb:
      case kArmLdr:
      case kArmStr:
      case kArmStoreWriteBarrier:
        return value >= -4095 && value <= 4095;

      // Addressing-mode-3 halfword accesses take an 8-bit offset: +/-255.
      case kArmLdrh:
      case kArmLdrsh:
      case kArmStrh:
        return value >= -255 && value <= 255;

      default:
        break;
    }
    return false;
  }
};
76
77
Emily Bernierd0a1eb72015-03-24 16:35:39 -040078namespace {
79
80void VisitRRFloat64(InstructionSelector* selector, ArchOpcode opcode,
81 Node* node) {
82 ArmOperandGenerator g(selector);
83 selector->Emit(opcode, g.DefineAsRegister(node),
84 g.UseRegister(node->InputAt(0)));
85}
86
87
88void VisitRRRFloat64(InstructionSelector* selector, ArchOpcode opcode,
89 Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000090 ArmOperandGenerator g(selector);
91 selector->Emit(opcode, g.DefineAsRegister(node),
92 g.UseRegister(node->InputAt(0)),
93 g.UseRegister(node->InputAt(1)));
94}
95
96
Emily Bernierd0a1eb72015-03-24 16:35:39 -040097template <IrOpcode::Value kOpcode, int kImmMin, int kImmMax,
98 AddressingMode kImmMode, AddressingMode kRegMode>
99bool TryMatchShift(InstructionSelector* selector,
100 InstructionCode* opcode_return, Node* node,
101 InstructionOperand** value_return,
102 InstructionOperand** shift_return) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000103 ArmOperandGenerator g(selector);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400104 if (node->opcode() == kOpcode) {
105 Int32BinopMatcher m(node);
106 *value_return = g.UseRegister(m.left().node());
107 if (m.right().IsInRange(kImmMin, kImmMax)) {
108 *opcode_return |= AddressingModeField::encode(kImmMode);
109 *shift_return = g.UseImmediate(m.right().node());
110 } else {
111 *opcode_return |= AddressingModeField::encode(kRegMode);
112 *shift_return = g.UseRegister(m.right().node());
113 }
114 return true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000115 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400116 return false;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000117}
118
119
// Matches a rotate-right; ROR immediates are 1-31 (ROR #0 encodes RRX).
bool TryMatchROR(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand** value_return,
                 InstructionOperand** shift_return) {
  return TryMatchShift<IrOpcode::kWord32Ror, 1, 31, kMode_Operand2_R_ROR_I,
                       kMode_Operand2_R_ROR_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
127
128
// Matches an arithmetic shift right; ASR immediates are 1-32.
bool TryMatchASR(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand** value_return,
                 InstructionOperand** shift_return) {
  return TryMatchShift<IrOpcode::kWord32Sar, 1, 32, kMode_Operand2_R_ASR_I,
                       kMode_Operand2_R_ASR_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
136
137
// Matches a logical shift left; LSL immediates are 0-31.
bool TryMatchLSL(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand** value_return,
                 InstructionOperand** shift_return) {
  return TryMatchShift<IrOpcode::kWord32Shl, 0, 31, kMode_Operand2_R_LSL_I,
                       kMode_Operand2_R_LSL_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
145
146
// Matches a logical shift right; LSR immediates are 1-32.
bool TryMatchLSR(InstructionSelector* selector, InstructionCode* opcode_return,
                 Node* node, InstructionOperand** value_return,
                 InstructionOperand** shift_return) {
  return TryMatchShift<IrOpcode::kWord32Shr, 1, 32, kMode_Operand2_R_LSR_I,
                       kMode_Operand2_R_LSR_R>(selector, opcode_return, node,
                                               value_return, shift_return);
}
154
155
156bool TryMatchShift(InstructionSelector* selector,
157 InstructionCode* opcode_return, Node* node,
158 InstructionOperand** value_return,
159 InstructionOperand** shift_return) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000160 return (
161 TryMatchASR(selector, opcode_return, node, value_return, shift_return) ||
162 TryMatchLSL(selector, opcode_return, node, value_return, shift_return) ||
163 TryMatchLSR(selector, opcode_return, node, value_return, shift_return) ||
164 TryMatchROR(selector, opcode_return, node, value_return, shift_return));
165}
166
167
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400168bool TryMatchImmediateOrShift(InstructionSelector* selector,
169 InstructionCode* opcode_return, Node* node,
170 size_t* input_count_return,
171 InstructionOperand** inputs) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000172 ArmOperandGenerator g(selector);
173 if (g.CanBeImmediate(node, *opcode_return)) {
174 *opcode_return |= AddressingModeField::encode(kMode_Operand2_I);
175 inputs[0] = g.UseImmediate(node);
176 *input_count_return = 1;
177 return true;
178 }
179 if (TryMatchShift(selector, opcode_return, node, &inputs[0], &inputs[1])) {
180 *input_count_return = 2;
181 return true;
182 }
183 return false;
184}
185
186
// Emits a commutative/reversible binary operation, trying to fold the right
// (or, via |reverse_opcode|, the left) operand into an immediate or shifted
// second operand.  |cont| optionally turns the instruction into a branch or
// a flag-setting materialization.
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, InstructionCode reverse_opcode,
                FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  // Max inputs: 2 operands + 1 folded shift slot + 2 branch labels.
  InstructionOperand* inputs[5];
  size_t input_count = 0;
  InstructionOperand* outputs[2];
  size_t output_count = 0;

  if (m.left().node() == m.right().node()) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov r0, r1, asr #16
    //   adds r0, r0, r1, asr #16
    //   bvs label
    InstructionOperand* const input = g.UseRegister(m.left().node());
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(),
                                      &input_count, &inputs[1])) {
    // Right operand folded; left goes in a register.  TryMatchImmediateOrShift
    // already wrote inputs[1] (and maybe inputs[2]) and set input_count.
    inputs[0] = g.UseRegister(m.left().node());
    input_count++;
  } else if (TryMatchImmediateOrShift(selector, &reverse_opcode,
                                      m.left().node(), &input_count,
                                      &inputs[1])) {
    // Left operand folded instead; switch to the reversed instruction.
    inputs[0] = g.UseRegister(m.right().node());
    opcode = reverse_opcode;
    input_count++;
  } else {
    // Neither side folds: plain register-register form.
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.UseRegister(m.right().node());
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_NE(0, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);
  DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}
245
246
// Convenience overload without a flags continuation.
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, InstructionCode reverse_opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, reverse_opcode, &cont);
}
252
253
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400254} // namespace
255
256
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000257void InstructionSelector::VisitLoad(Node* node) {
258 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node));
259 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node));
260 ArmOperandGenerator g(this);
261 Node* base = node->InputAt(0);
262 Node* index = node->InputAt(1);
263
264 ArchOpcode opcode;
265 switch (rep) {
266 case kRepFloat32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400267 opcode = kArmVldrF32;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000268 break;
269 case kRepFloat64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400270 opcode = kArmVldrF64;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000271 break;
272 case kRepBit: // Fall through.
273 case kRepWord8:
274 opcode = typ == kTypeUint32 ? kArmLdrb : kArmLdrsb;
275 break;
276 case kRepWord16:
277 opcode = typ == kTypeUint32 ? kArmLdrh : kArmLdrsh;
278 break;
279 case kRepTagged: // Fall through.
280 case kRepWord32:
281 opcode = kArmLdr;
282 break;
283 default:
284 UNREACHABLE();
285 return;
286 }
287
288 if (g.CanBeImmediate(index, opcode)) {
289 Emit(opcode | AddressingModeField::encode(kMode_Offset_RI),
290 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
291 } else {
292 Emit(opcode | AddressingModeField::encode(kMode_Offset_RR),
293 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
294 }
295}
296
297
298void InstructionSelector::VisitStore(Node* node) {
299 ArmOperandGenerator g(this);
300 Node* base = node->InputAt(0);
301 Node* index = node->InputAt(1);
302 Node* value = node->InputAt(2);
303
304 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node);
305 MachineType rep = RepresentationOf(store_rep.machine_type());
306 if (store_rep.write_barrier_kind() == kFullWriteBarrier) {
307 DCHECK(rep == kRepTagged);
308 // TODO(dcarney): refactor RecordWrite function to take temp registers
309 // and pass them here instead of using fixed regs
310 // TODO(dcarney): handle immediate indices.
311 InstructionOperand* temps[] = {g.TempRegister(r5), g.TempRegister(r6)};
312 Emit(kArmStoreWriteBarrier, NULL, g.UseFixed(base, r4),
313 g.UseFixed(index, r5), g.UseFixed(value, r6), arraysize(temps),
314 temps);
315 return;
316 }
317 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind());
318
319 ArchOpcode opcode;
320 switch (rep) {
321 case kRepFloat32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400322 opcode = kArmVstrF32;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000323 break;
324 case kRepFloat64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400325 opcode = kArmVstrF64;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000326 break;
327 case kRepBit: // Fall through.
328 case kRepWord8:
329 opcode = kArmStrb;
330 break;
331 case kRepWord16:
332 opcode = kArmStrh;
333 break;
334 case kRepTagged: // Fall through.
335 case kRepWord32:
336 opcode = kArmStr;
337 break;
338 default:
339 UNREACHABLE();
340 return;
341 }
342
343 if (g.CanBeImmediate(index, opcode)) {
344 Emit(opcode | AddressingModeField::encode(kMode_Offset_RI), NULL,
345 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
346 } else {
347 Emit(opcode | AddressingModeField::encode(kMode_Offset_RR), NULL,
348 g.UseRegister(base), g.UseRegister(index), g.UseRegister(value));
349 }
350}
351
352
// Selects a bounds-checked load.  The operand order (offset, length, buffer,
// offset again) is the contract with the code generator: it compares offset
// against length before performing the access.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
  MachineType typ = TypeOf(OpParameter<MachineType>(node));
  ArmOperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode;
  switch (rep) {
    case kRepWord8:
      opcode = typ == kTypeInt32 ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case kRepWord16:
      opcode = typ == kTypeInt32 ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case kRepWord32:
      opcode = kCheckedLoadWord32;
      break;
    case kRepFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case kRepFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    default:
      UNREACHABLE();
      return;
  }
  InstructionOperand* offset_operand = g.UseRegister(offset);
  // The length is compared with kArmCmp, so its immediate range applies.
  InstructionOperand* length_operand = g.CanBeImmediate(length, kArmCmp)
                                           ? g.UseImmediate(length)
                                           : g.UseRegister(length);
  Emit(opcode | AddressingModeField::encode(kMode_Offset_RR),
       g.DefineAsRegister(node), offset_operand, length_operand,
       g.UseRegister(buffer), offset_operand);
}
389
390
// Selects a bounds-checked store; mirrors VisitCheckedLoad but produces no
// value (nullptr output) and takes the value to store as an extra input.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
  ArmOperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode;
  switch (rep) {
    case kRepWord8:
      opcode = kCheckedStoreWord8;
      break;
    case kRepWord16:
      opcode = kCheckedStoreWord16;
      break;
    case kRepWord32:
      opcode = kCheckedStoreWord32;
      break;
    case kRepFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case kRepFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    default:
      UNREACHABLE();
      return;
  }
  InstructionOperand* offset_operand = g.UseRegister(offset);
  // The length is compared with kArmCmp, so its immediate range applies.
  InstructionOperand* length_operand = g.CanBeImmediate(length, kArmCmp)
                                           ? g.UseImmediate(length)
                                           : g.UseRegister(length);
  Emit(opcode | AddressingModeField::encode(kMode_Offset_RR), nullptr,
       offset_operand, length_operand, g.UseRegister(value),
       g.UseRegister(buffer), offset_operand);
}
427
428
429namespace {
430
431void EmitBic(InstructionSelector* selector, Node* node, Node* left,
432 Node* right) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000433 ArmOperandGenerator g(selector);
434 InstructionCode opcode = kArmBic;
435 InstructionOperand* value_operand;
436 InstructionOperand* shift_operand;
437 if (TryMatchShift(selector, &opcode, right, &value_operand, &shift_operand)) {
438 selector->Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
439 value_operand, shift_operand);
440 return;
441 }
442 selector->Emit(opcode | AddressingModeField::encode(kMode_Operand2_R),
443 g.DefineAsRegister(node), g.UseRegister(left),
444 g.UseRegister(right));
445}
446
447
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400448void EmitUbfx(InstructionSelector* selector, Node* node, Node* left,
449 uint32_t lsb, uint32_t width) {
450 DCHECK_LE(1, width);
451 DCHECK_LE(width, 32 - lsb);
452 ArmOperandGenerator g(selector);
453 selector->Emit(kArmUbfx, g.DefineAsRegister(node), g.UseRegister(left),
454 g.TempImmediate(lsb), g.TempImmediate(width));
455}
456
457} // namespace
458
459
// Selects for Word32And, trying progressively cheaper encodings before
// falling back to a generic AND: BIC for x & ~y, UBFX for contiguous
// low-bit masks, an immediate BIC for masks whose complement encodes,
// UXTH for 0xffff, and BFC for masks that clear one contiguous bit field.
void InstructionSelector::VisitWord32And(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  // x & (y ^ -1)  =>  x & ~y  =>  BIC.
  if (m.left().IsWord32Xor() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(-1)) {
      EmitBic(this, node, m.right().node(), mleft.left().node());
      return;
    }
  }
  if (m.right().IsWord32Xor() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    if (mright.right().Is(-1)) {
      EmitBic(this, node, m.left().node(), mright.left().node());
      return;
    }
  }
  if (m.right().HasValue()) {
    uint32_t const value = m.right().Value();
    uint32_t width = base::bits::CountPopulation32(value);
    uint32_t msb = base::bits::CountLeadingZeros32(value);
    // Try to interpret this AND as UBFX.
    // msb + width == 32 means the mask is a contiguous run of |width| ones
    // starting at bit 0.
    if (IsSupported(ARMv7) && width != 0 && msb + width == 32) {
      DCHECK_EQ(0, base::bits::CountTrailingZeros32(value));
      if (m.left().IsWord32Shr()) {
        Int32BinopMatcher mleft(m.left().node());
        if (mleft.right().IsInRange(0, 31)) {
          // UBFX cannot extract bits past the register size, however since
          // shifting the original value would have introduced some zeros we can
          // still use UBFX with a smaller mask and the remaining bits will be
          // zeros.
          uint32_t const lsb = mleft.right().Value();
          return EmitUbfx(this, node, mleft.left().node(), lsb,
                          std::min(width, 32 - lsb));
        }
      }
      return EmitUbfx(this, node, m.left().node(), 0, width);
    }
    // Try to interpret this AND as BIC.
    if (g.CanBeImmediate(~value)) {
      Emit(kArmBic | AddressingModeField::encode(kMode_Operand2_I),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(~value));
      return;
    }
    // Try to interpret this AND as UXTH.
    if (value == 0xffff) {
      Emit(kArmUxth, g.DefineAsRegister(m.node()),
           g.UseRegister(m.left().node()), g.TempImmediate(0));
      return;
    }
    // Try to interpret this AND as BFC: the complement of the mask must be a
    // single contiguous run of ones (the field to clear).
    if (IsSupported(ARMv7)) {
      width = 32 - width;
      msb = base::bits::CountLeadingZeros32(~value);
      uint32_t lsb = base::bits::CountTrailingZeros32(~value);
      if (msb + width + lsb == 32) {
        // BFC modifies its operand in place, hence DefineSameAsFirst.
        Emit(kArmBfc, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
             g.TempImmediate(lsb), g.TempImmediate(width));
        return;
      }
    }
  }
  VisitBinop(this, node, kArmAnd, kArmAnd);
}
525
526
// ORR is commutative and has no complementary form, so the reverse opcode
// is ORR as well.
void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kArmOrr, kArmOrr);
}
530
531
532void InstructionSelector::VisitWord32Xor(Node* node) {
533 ArmOperandGenerator g(this);
534 Int32BinopMatcher m(node);
535 if (m.right().Is(-1)) {
536 InstructionCode opcode = kArmMvn;
537 InstructionOperand* value_operand;
538 InstructionOperand* shift_operand;
539 if (TryMatchShift(this, &opcode, m.left().node(), &value_operand,
540 &shift_operand)) {
541 Emit(opcode, g.DefineAsRegister(node), value_operand, shift_operand);
542 return;
543 }
544 Emit(opcode | AddressingModeField::encode(kMode_Operand2_R),
545 g.DefineAsRegister(node), g.UseRegister(m.left().node()));
546 return;
547 }
548 VisitBinop(this, node, kArmEor, kArmEor);
549}
550
551
// Emits a shift as a MOV with a shifted second operand.  |try_match_shift|
// must match (CHECKed) since the node is known to be the right shift kind.
// |cont| optionally turns the instruction into a branch or flag set.
template <typename TryMatchShift>
static inline void VisitShift(InstructionSelector* selector, Node* node,
                              TryMatchShift try_match_shift,
                              FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  InstructionCode opcode = kArmMov;
  // Max inputs: value + shift amount + 2 branch labels.
  InstructionOperand* inputs[4];
  size_t input_count = 2;
  InstructionOperand* outputs[2];
  size_t output_count = 0;

  CHECK(try_match_shift(selector, &opcode, node, &inputs[0], &inputs[1]));

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_NE(0, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);
  DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}
585
586
// Convenience overload without a flags continuation.
template <typename TryMatchShift>
static inline void VisitShift(InstructionSelector* selector, Node* node,
                              TryMatchShift try_match_shift) {
  FlagsContinuation cont;
  VisitShift(selector, node, try_match_shift, &cont);
}
593
594
// Shl selects MOV with an LSL second operand.
void InstructionSelector::VisitWord32Shl(Node* node) {
  VisitShift(this, node, TryMatchLSL);
}
598
599
// Shr selects MOV/LSR, but (x & mask) >> lsb with a contiguous mask starting
// at bit |lsb| is strength-reduced to a single UBFX on ARMv7.
void InstructionSelector::VisitWord32Shr(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (IsSupported(ARMv7) && m.left().IsWord32And() &&
      m.right().IsInRange(0, 31)) {
    int32_t lsb = m.right().Value();
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Only the mask bits at or above |lsb| survive the shift.
      uint32_t value = (mleft.right().Value() >> lsb) << lsb;
      uint32_t width = base::bits::CountPopulation32(value);
      uint32_t msb = base::bits::CountLeadingZeros32(value);
      // msb + width + lsb == 32 iff the surviving mask is one contiguous run
      // of ones starting at bit |lsb|.
      if (msb + width + lsb == 32) {
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(value));
        return EmitUbfx(this, node, mleft.left().node(), lsb, width);
      }
    }
  }
  VisitShift(this, node, TryMatchLSR);
}
619
620
621void InstructionSelector::VisitWord32Sar(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400622 ArmOperandGenerator g(this);
623 Int32BinopMatcher m(node);
624 if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
625 Int32BinopMatcher mleft(m.left().node());
626 if (mleft.right().Is(16) && m.right().Is(16)) {
627 Emit(kArmSxth, g.DefineAsRegister(node),
628 g.UseRegister(mleft.left().node()), g.TempImmediate(0));
629 return;
630 } else if (mleft.right().Is(24) && m.right().Is(24)) {
631 Emit(kArmSxtb, g.DefineAsRegister(node),
632 g.UseRegister(mleft.left().node()), g.TempImmediate(0));
633 return;
634 }
635 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000636 VisitShift(this, node, TryMatchASR);
637}
638
639
// Ror selects MOV with a ROR second operand.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitShift(this, node, TryMatchROR);
}
643
644
645void InstructionSelector::VisitInt32Add(Node* node) {
646 ArmOperandGenerator g(this);
647 Int32BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400648 if (CanCover(node, m.left().node())) {
649 switch (m.left().opcode()) {
650 case IrOpcode::kInt32Mul: {
651 Int32BinopMatcher mleft(m.left().node());
652 Emit(kArmMla, g.DefineAsRegister(node),
653 g.UseRegister(mleft.left().node()),
654 g.UseRegister(mleft.right().node()),
655 g.UseRegister(m.right().node()));
656 return;
657 }
658 case IrOpcode::kInt32MulHigh: {
659 Int32BinopMatcher mleft(m.left().node());
660 Emit(kArmSmmla, g.DefineAsRegister(node),
661 g.UseRegister(mleft.left().node()),
662 g.UseRegister(mleft.right().node()),
663 g.UseRegister(m.right().node()));
664 return;
665 }
666 case IrOpcode::kWord32And: {
667 Int32BinopMatcher mleft(m.left().node());
668 if (mleft.right().Is(0xff)) {
669 Emit(kArmUxtab, g.DefineAsRegister(node),
670 g.UseRegister(m.right().node()),
671 g.UseRegister(mleft.left().node()), g.TempImmediate(0));
672 return;
673 } else if (mleft.right().Is(0xffff)) {
674 Emit(kArmUxtah, g.DefineAsRegister(node),
675 g.UseRegister(m.right().node()),
676 g.UseRegister(mleft.left().node()), g.TempImmediate(0));
677 return;
678 }
679 }
680 case IrOpcode::kWord32Sar: {
681 Int32BinopMatcher mleft(m.left().node());
682 if (CanCover(mleft.node(), mleft.left().node()) &&
683 mleft.left().IsWord32Shl()) {
684 Int32BinopMatcher mleftleft(mleft.left().node());
685 if (mleft.right().Is(24) && mleftleft.right().Is(24)) {
686 Emit(kArmSxtab, g.DefineAsRegister(node),
687 g.UseRegister(m.right().node()),
688 g.UseRegister(mleftleft.left().node()), g.TempImmediate(0));
689 return;
690 } else if (mleft.right().Is(16) && mleftleft.right().Is(16)) {
691 Emit(kArmSxtah, g.DefineAsRegister(node),
692 g.UseRegister(m.right().node()),
693 g.UseRegister(mleftleft.left().node()), g.TempImmediate(0));
694 return;
695 }
696 }
697 }
698 default:
699 break;
700 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000701 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400702 if (CanCover(node, m.right().node())) {
703 switch (m.right().opcode()) {
704 case IrOpcode::kInt32Mul: {
705 Int32BinopMatcher mright(m.right().node());
706 Emit(kArmMla, g.DefineAsRegister(node),
707 g.UseRegister(mright.left().node()),
708 g.UseRegister(mright.right().node()),
709 g.UseRegister(m.left().node()));
710 return;
711 }
712 case IrOpcode::kInt32MulHigh: {
713 Int32BinopMatcher mright(m.right().node());
714 Emit(kArmSmmla, g.DefineAsRegister(node),
715 g.UseRegister(mright.left().node()),
716 g.UseRegister(mright.right().node()),
717 g.UseRegister(m.left().node()));
718 return;
719 }
720 case IrOpcode::kWord32And: {
721 Int32BinopMatcher mright(m.right().node());
722 if (mright.right().Is(0xff)) {
723 Emit(kArmUxtab, g.DefineAsRegister(node),
724 g.UseRegister(m.left().node()),
725 g.UseRegister(mright.left().node()), g.TempImmediate(0));
726 return;
727 } else if (mright.right().Is(0xffff)) {
728 Emit(kArmUxtah, g.DefineAsRegister(node),
729 g.UseRegister(m.left().node()),
730 g.UseRegister(mright.left().node()), g.TempImmediate(0));
731 return;
732 }
733 }
734 case IrOpcode::kWord32Sar: {
735 Int32BinopMatcher mright(m.right().node());
736 if (CanCover(mright.node(), mright.left().node()) &&
737 mright.left().IsWord32Shl()) {
738 Int32BinopMatcher mrightleft(mright.left().node());
739 if (mright.right().Is(24) && mrightleft.right().Is(24)) {
740 Emit(kArmSxtab, g.DefineAsRegister(node),
741 g.UseRegister(m.left().node()),
742 g.UseRegister(mrightleft.left().node()), g.TempImmediate(0));
743 return;
744 } else if (mright.right().Is(16) && mrightleft.right().Is(16)) {
745 Emit(kArmSxtah, g.DefineAsRegister(node),
746 g.UseRegister(m.left().node()),
747 g.UseRegister(mrightleft.left().node()), g.TempImmediate(0));
748 return;
749 }
750 }
751 }
752 default:
753 break;
754 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000755 }
756 VisitBinop(this, node, kArmAdd, kArmAdd);
757}
758
759
760void InstructionSelector::VisitInt32Sub(Node* node) {
761 ArmOperandGenerator g(this);
762 Int32BinopMatcher m(node);
763 if (IsSupported(MLS) && m.right().IsInt32Mul() &&
764 CanCover(node, m.right().node())) {
765 Int32BinopMatcher mright(m.right().node());
766 Emit(kArmMls, g.DefineAsRegister(node), g.UseRegister(mright.left().node()),
767 g.UseRegister(mright.right().node()), g.UseRegister(m.left().node()));
768 return;
769 }
770 VisitBinop(this, node, kArmSub, kArmRsb);
771}
772
773
// Selects for Int32Mul, strength-reducing multiplication by (2^k + 1) to
// ADD with a shifted operand and by (2^k - 1) to RSB with a shifted
// operand; otherwise emits a plain MUL.
void InstructionSelector::VisitInt32Mul(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().HasValue() && m.right().Value() > 0) {
    int32_t value = m.right().Value();
    // x * (2^k + 1)  =>  x + (x << k)
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      Emit(kArmAdd | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      return;
    }
    // x * (2^k - 1)  =>  (x << k) - x.  The kMaxInt guard keeps value + 1
    // from overflowing.
    if (value < kMaxInt && base::bits::IsPowerOfTwo32(value + 1)) {
      Emit(kArmRsb | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      return;
    }
  }
  Emit(kArmMul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
797
798
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400799void InstructionSelector::VisitInt32MulHigh(Node* node) {
800 ArmOperandGenerator g(this);
801 Emit(kArmSmmul, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
802 g.UseRegister(node->InputAt(1)));
803}
804
805
// The high 32 bits of an unsigned 32x32->64 multiply.  UMULL writes a
// register pair: the low word goes to a scratch register, the high word
// (outputs[1]) is the node's value.
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  ArmOperandGenerator g(this);
  InstructionOperand* outputs[] = {g.TempRegister(), g.DefineAsRegister(node)};
  InstructionOperand* inputs[] = {g.UseRegister(node->InputAt(0)),
                                  g.UseRegister(node->InputAt(1))};
  Emit(kArmUmull, arraysize(outputs), outputs, arraysize(inputs), inputs);
}
813
814
// Emits an integer division.  With hardware SUDIV this is a single
// instruction; otherwise the division is performed in floating point:
// convert both operands to float64, VDIV, and convert the quotient back.
static void EmitDiv(InstructionSelector* selector, ArchOpcode div_opcode,
                    ArchOpcode f64i32_opcode, ArchOpcode i32f64_opcode,
                    InstructionOperand* result_operand,
                    InstructionOperand* left_operand,
                    InstructionOperand* right_operand) {
  ArmOperandGenerator g(selector);
  if (selector->IsSupported(SUDIV)) {
    selector->Emit(div_opcode, result_operand, left_operand, right_operand);
    return;
  }
  InstructionOperand* left_double_operand = g.TempDoubleRegister();
  InstructionOperand* right_double_operand = g.TempDoubleRegister();
  InstructionOperand* result_double_operand = g.TempDoubleRegister();
  selector->Emit(f64i32_opcode, left_double_operand, left_operand);
  selector->Emit(f64i32_opcode, right_double_operand, right_operand);
  selector->Emit(kArmVdivF64, result_double_operand, left_double_operand,
                 right_double_operand);
  selector->Emit(i32f64_opcode, result_operand, result_double_operand);
}
834
835
836static void VisitDiv(InstructionSelector* selector, Node* node,
837 ArchOpcode div_opcode, ArchOpcode f64i32_opcode,
838 ArchOpcode i32f64_opcode) {
839 ArmOperandGenerator g(selector);
840 Int32BinopMatcher m(node);
841 EmitDiv(selector, div_opcode, f64i32_opcode, i32f64_opcode,
842 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
843 g.UseRegister(m.right().node()));
844}
845
846
// Signed division: SDIV, or signed-convert float fallback.
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kArmSdiv, kArmVcvtF64S32, kArmVcvtS32F64);
}
850
851
// Unsigned division: UDIV, or unsigned-convert float fallback.
void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kArmUdiv, kArmVcvtF64U32, kArmVcvtU32F64);
}
855
856
// Emits a remainder as left - (left / right) * right: divide first (via
// EmitDiv, which may use the float fallback), then either a single MLS when
// available or an explicit MUL + SUB pair.
static void VisitMod(InstructionSelector* selector, Node* node,
                     ArchOpcode div_opcode, ArchOpcode f64i32_opcode,
                     ArchOpcode i32f64_opcode) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand* div_operand = g.TempRegister();
  InstructionOperand* result_operand = g.DefineAsRegister(node);
  InstructionOperand* left_operand = g.UseRegister(m.left().node());
  InstructionOperand* right_operand = g.UseRegister(m.right().node());
  EmitDiv(selector, div_opcode, f64i32_opcode, i32f64_opcode, div_operand,
          left_operand, right_operand);
  if (selector->IsSupported(MLS)) {
    // result = left - div * right in one instruction.
    selector->Emit(kArmMls, result_operand, div_operand, right_operand,
                   left_operand);
    return;
  }
  InstructionOperand* mul_operand = g.TempRegister();
  selector->Emit(kArmMul, mul_operand, div_operand, right_operand);
  selector->Emit(kArmSub, result_operand, left_operand, mul_operand);
}
877
878
// Lowers Int32Mod via the shared div-then-subtract sequence, using the
// signed division and convert opcodes.
void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kArmSdiv, kArmVcvtF64S32, kArmVcvtS32F64);
}
882
883
// Lowers Uint32Mod via the shared div-then-subtract sequence, using the
// unsigned division and convert opcodes.
void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kArmUdiv, kArmVcvtF64U32, kArmVcvtU32F64);
}
887
888
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400889void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
890 ArmOperandGenerator g(this);
891 Emit(kArmVcvtF64F32, g.DefineAsRegister(node),
892 g.UseRegister(node->InputAt(0)));
893}
894
895
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000896void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
897 ArmOperandGenerator g(this);
898 Emit(kArmVcvtF64S32, g.DefineAsRegister(node),
899 g.UseRegister(node->InputAt(0)));
900}
901
902
903void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
904 ArmOperandGenerator g(this);
905 Emit(kArmVcvtF64U32, g.DefineAsRegister(node),
906 g.UseRegister(node->InputAt(0)));
907}
908
909
910void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
911 ArmOperandGenerator g(this);
912 Emit(kArmVcvtS32F64, g.DefineAsRegister(node),
913 g.UseRegister(node->InputAt(0)));
914}
915
916
917void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
918 ArmOperandGenerator g(this);
919 Emit(kArmVcvtU32F64, g.DefineAsRegister(node),
920 g.UseRegister(node->InputAt(0)));
921}
922
923
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400924void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
925 ArmOperandGenerator g(this);
926 Emit(kArmVcvtF32F64, g.DefineAsRegister(node),
927 g.UseRegister(node->InputAt(0)));
928}
929
930
// Lowers Float64Add. If either addend is a Float64Mul that this node covers,
// the add and multiply are fused into one multiply-accumulate (VMLA).
void InstructionSelector::VisitFloat64Add(Node* node) {
  ArmOperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
    Float64BinopMatcher mleft(m.left().node());
    // VMLA accumulates into its destination, so the plain addend is passed
    // as the first input and tied to the output via DefineSameAsFirst.
    Emit(kArmVmlaF64, g.DefineSameAsFirst(node),
         g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
         g.UseRegister(mleft.right().node()));
    return;
  }
  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
    Float64BinopMatcher mright(m.right().node());
    Emit(kArmVmlaF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
         g.UseRegister(mright.left().node()),
         g.UseRegister(mright.right().node()));
    return;
  }
  // No fusion opportunity: plain VADD.
  VisitRRRFloat64(this, kArmVaddF64, node);
}
950
951
// Lowers Float64Sub. Recognizes (-0.0 - x) as a negation, and fuses a
// covered Float64Mul on the right-hand side into a multiply-subtract (VMLS).
void InstructionSelector::VisitFloat64Sub(Node* node) {
  ArmOperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    // -0.0 - x is exactly float negation, so a single VNEG suffices.
    Emit(kArmVnegF64, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
    Float64BinopMatcher mright(m.right().node());
    // result = left - mright.left * mright.right, accumulated in place.
    Emit(kArmVmlsF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
         g.UseRegister(mright.left().node()),
         g.UseRegister(mright.right().node()));
    return;
  }
  VisitRRRFloat64(this, kArmVsubF64, node);
}
969
970
// Lowers Float64Mul to a plain VMUL (reg, reg, reg).
void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRRFloat64(this, kArmVmulF64, node);
}
974
975
// Lowers Float64Div to a plain VDIV (reg, reg, reg).
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRRFloat64(this, kArmVdivF64, node);
}
979
980
// Lowers Float64Mod. There is no ARM instruction for it, so it is emitted as
// a call (MarkAsCall) with the arguments and result in fixed VFP registers
// d0/d1 per the runtime routine's calling convention.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVmodF64, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
}
986
987
988void InstructionSelector::VisitFloat64Sqrt(Node* node) {
989 ArmOperandGenerator g(this);
990 Emit(kArmVsqrtF64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
991}
992
993
// Lowers Float64Floor. Only reachable on ARMv8, since the corresponding
// machine-operator flag is only advertised then (see
// SupportedMachineOperatorFlags); the DCHECK enforces that invariant.
void InstructionSelector::VisitFloat64Floor(Node* node) {
  DCHECK(CpuFeatures::IsSupported(ARMv8));
  VisitRRFloat64(this, kArmVfloorF64, node);
}
998
999
// Lowers Float64Ceil. ARMv8-only, like VisitFloat64Floor; the flag is only
// advertised when the rounding instructions exist.
void InstructionSelector::VisitFloat64Ceil(Node* node) {
  DCHECK(CpuFeatures::IsSupported(ARMv8));
  VisitRRFloat64(this, kArmVceilF64, node);
}
1004
1005
// Lowers Float64RoundTruncate (round toward zero). ARMv8-only.
void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  DCHECK(CpuFeatures::IsSupported(ARMv8));
  VisitRRFloat64(this, kArmVroundTruncateF64, node);
}
1010
1011
// Lowers Float64RoundTiesAway (round to nearest, ties away from zero).
// ARMv8-only.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  DCHECK(CpuFeatures::IsSupported(ARMv8));
  VisitRRFloat64(this, kArmVroundTiesAwayF64, node);
}
1016
1017
1018void InstructionSelector::VisitCall(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001019 ArmOperandGenerator g(this);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001020 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001021
1022 FrameStateDescriptor* frame_state_descriptor = NULL;
1023 if (descriptor->NeedsFrameState()) {
1024 frame_state_descriptor =
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001025 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001026 }
1027
1028 CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
1029
1030 // Compute InstructionOperands for inputs and outputs.
1031 // TODO(turbofan): on ARM64 it's probably better to use the code object in a
1032 // register if there are multiple uses of it. Improve constant pool and the
1033 // heuristics in the register allocator for where to emit constants.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001034 InitializeCallBuffer(node, &buffer, true, false);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001035
1036 // TODO(dcarney): might be possible to use claim/poke instead
1037 // Push any stack arguments.
1038 for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
1039 input != buffer.pushed_nodes.rend(); input++) {
1040 Emit(kArmPush, NULL, g.UseRegister(*input));
1041 }
1042
1043 // Select the appropriate opcode based on the call type.
1044 InstructionCode opcode;
1045 switch (descriptor->kind()) {
1046 case CallDescriptor::kCallCodeObject: {
1047 opcode = kArchCallCodeObject;
1048 break;
1049 }
1050 case CallDescriptor::kCallJSFunction:
1051 opcode = kArchCallJSFunction;
1052 break;
1053 default:
1054 UNREACHABLE();
1055 return;
1056 }
1057 opcode |= MiscField::encode(descriptor->flags());
1058
1059 // Emit the call instruction.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001060 InstructionOperand** first_output =
1061 buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001062 Instruction* call_instr =
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001063 Emit(opcode, buffer.outputs.size(), first_output,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001064 buffer.instruction_args.size(), &buffer.instruction_args.front());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001065 call_instr->MarkAsCall();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001066}
1067
1068
namespace {

// Shared routine for multiple float compare operations. Emits a VFP double
// compare whose condition is either branched on or materialized as a boolean,
// depending on the continuation.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  Float64BinopMatcher m(node);
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kArmVcmpF64), nullptr,
                   g.UseRegister(m.left().node()),
                   g.UseRegister(m.right().node()), g.Label(cont->true_block()),
                   g.Label(cont->false_block()))->MarkAsControl();
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(
        cont->Encode(kArmVcmpF64), g.DefineAsRegister(cont->result()),
        g.UseRegister(m.left().node()), g.UseRegister(m.right().node()));
  }
}


// Shared routine for multiple word compare operations. Tries to encode one
// side as an immediate or shifted operand (ARM Operand2); if only the left
// side matches, the operands are swapped and the continuation commuted.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand* inputs[5];
  size_t input_count = 0;
  InstructionOperand* outputs[1];
  size_t output_count = 0;

  if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(),
                               &input_count, &inputs[1])) {
    inputs[0] = g.UseRegister(m.left().node());
    input_count++;
  } else if (TryMatchImmediateOrShift(selector, &opcode, m.left().node(),
                                      &input_count, &inputs[1])) {
    // Operands were swapped: commute the condition unless the operation is
    // commutative anyway.
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    inputs[0] = g.UseRegister(m.right().node());
    input_count++;
  } else {
    // Neither side fits Operand2: compare two registers.
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.UseRegister(m.right().node());
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  } else {
    DCHECK(cont->IsSet());
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}


// Convenience overload defaulting the comparison opcode to CMP.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kArmCmp, cont);
}


// Shared routine for word comparisons against zero. Walks down the chain of
// covered producers, folding flag-setting operations (compares, arithmetic,
// shifts, overflow projections) directly into the continuation; if nothing
// combines, falls back to an explicit TST against self.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnorderedLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (OpParameter<size_t>(value) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either NULL, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = node->FindProjection(0);
          if (!result || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kArmAdd, kArmAdd, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kArmSub, kArmRsb, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Add:
        return VisitWordCompare(selector, value, kArmCmn, cont);
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, kArmCmp, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kArmTst, cont);
      case IrOpcode::kWord32Or:
        return VisitBinop(selector, value, kArmOrr, kArmOrr, cont);
      case IrOpcode::kWord32Xor:
        return VisitWordCompare(selector, value, kArmTeq, cont);
      case IrOpcode::kWord32Sar:
        return VisitShift(selector, value, TryMatchASR, cont);
      case IrOpcode::kWord32Shl:
        return VisitShift(selector, value, TryMatchLSL, cont);
      case IrOpcode::kWord32Shr:
        return VisitShift(selector, value, TryMatchLSR, cont);
      case IrOpcode::kWord32Ror:
        return VisitShift(selector, value, TryMatchROR, cont);
      default:
        break;
    }
    break;
  }

  // Continuation could not be combined with a compare, emit compare against 0.
  ArmOperandGenerator g(selector);
  InstructionCode const opcode =
      cont->Encode(kArmTst) | AddressingModeField::encode(kMode_Operand2_R);
  InstructionOperand* const value_operand = g.UseRegister(value);
  if (cont->IsBranch()) {
    selector->Emit(opcode, nullptr, value_operand, value_operand,
                   g.Label(cont->true_block()),
                   g.Label(cont->false_block()))->MarkAsControl();
  } else {
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                   value_operand);
  }
}

}  // namespace
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001243
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001244
// Lowers a branch on a word value: branch to tbranch when the condition
// value is non-zero, combining with a covered compare where possible.
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
1250
1251
// Lowers Word32Equal. A comparison against zero is routed through
// VisitWordCompareZero so it can merge with the flag-setting producer.
void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWordCompare(this, node, &cont);
}
1260
1261
// Lowers Int32LessThan to a CMP setting the signed-less-than condition.
void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}
1266
1267
// Lowers Int32LessThanOrEqual to a CMP with the signed-le condition.
void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}
1272
1273
// Lowers Uint32LessThan to a CMP with the unsigned-lt condition.
void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}
1278
1279
// Lowers Uint32LessThanOrEqual to a CMP with the unsigned-le condition.
void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}
1284
1285
// Lowers Int32AddWithOverflow. If the overflow projection (index 1) is used,
// emit a flag-setting add feeding that continuation; otherwise a plain add.
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = node->FindProjection(1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kArmAdd, kArmAdd, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kArmAdd, kArmAdd, &cont);
}
1294
1295
// Lowers Int32SubWithOverflow, mirroring VisitInt32AddWithOverflow; RSB is
// the reversed form used when operands are commuted.
void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = node->FindProjection(1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kArmSub, kArmRsb, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kArmSub, kArmRsb, &cont);
}
1304
1305
// Lowers Float64Equal via VFP compare with the unordered-aware equality
// condition.
void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont(kUnorderedEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
1310
1311
// Lowers Float64LessThan via VFP compare with the unordered-aware lt
// condition.
void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont(kUnorderedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}
1316
1317
// Lowers Float64LessThanOrEqual via VFP compare with the unordered-aware le
// condition.
void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnorderedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
1322
1323
1324// static
1325MachineOperatorBuilder::Flags
1326InstructionSelector::SupportedMachineOperatorFlags() {
1327 MachineOperatorBuilder::Flags flags =
1328 MachineOperatorBuilder::kInt32DivIsSafe |
1329 MachineOperatorBuilder::kUint32DivIsSafe;
1330
1331 if (CpuFeatures::IsSupported(ARMv8)) {
1332 flags |= MachineOperatorBuilder::kFloat64Floor |
1333 MachineOperatorBuilder::kFloat64Ceil |
1334 MachineOperatorBuilder::kFloat64RoundTruncate |
1335 MachineOperatorBuilder::kFloat64RoundTiesAway;
1336 }
1337 return flags;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001338}
1339
1340} // namespace compiler
1341} // namespace internal
1342} // namespace v8