blob: 240a4f2b205ae9aac90527e9a92e59263502964d [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
#include <limits>

#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008
9namespace v8 {
10namespace internal {
11namespace compiler {
12
// Classifies the immediate encodings accepted by the Arm64 instructions this
// selector emits; see Arm64OperandGenerator::CanBeImmediate for the checks.
enum ImmediateMode {
  kArithmeticImm,  // 12 bit unsigned immediate shifted left 0 or 12 bits
  kShift32Imm,     // 0 - 31
  kShift64Imm,     // 0 - 63
  kLogical32Imm,   // encodable by Assembler::IsImmLogical with width 32
  kLogical64Imm,   // encodable by Assembler::IsImmLogical with width 64
  kLoadStoreImm8,  // signed 8 bit or 12 bit unsigned scaled by access size
  kLoadStoreImm16,  // as above, scaled for halfword accesses
  kLoadStoreImm32,  // as above, scaled for word accesses
  kLoadStoreImm64,  // as above, scaled for doubleword accesses
  kNoImmediate      // no immediate form; operand must be in a register
};
25
26
// Adds Arm64-specific methods for generating operands.
class Arm64OperandGenerator final : public OperandGenerator {
 public:
  explicit Arm64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Use an immediate operand if |node| is a constant encodable under |mode|,
  // otherwise assign a register.
  InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
    if (CanBeImmediate(node, mode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  // Use the zero register if the node has the immediate value zero, otherwise
  // assign a register. Covers integer zero and float constants whose bit
  // pattern is zero; note that -0.0 has a non-zero bit pattern and therefore
  // gets a register.
  InstructionOperand UseRegisterOrImmediateZero(Node* node) {
    if ((IsIntegerConstant(node) && (GetIntegerConstantValue(node) == 0)) ||
        (IsFloatConstant(node) &&
         (bit_cast<int64_t>(GetFloatConstantValue(node)) == V8_INT64_C(0)))) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  // Use the provided node if it has the required value, or create a
  // TempImmediate otherwise.
  InstructionOperand UseImmediateOrTemp(Node* node, int32_t value) {
    if (GetIntegerConstantValue(node) == value) {
      return UseImmediate(node);
    }
    return TempImmediate(value);
  }

  // True for Int32 and Int64 constant nodes.
  bool IsIntegerConstant(Node* node) {
    return (node->opcode() == IrOpcode::kInt32Constant) ||
           (node->opcode() == IrOpcode::kInt64Constant);
  }

  // Returns the value of an integer constant node, widened to int64_t.
  // Must only be called when IsIntegerConstant(node) holds.
  int64_t GetIntegerConstantValue(Node* node) {
    if (node->opcode() == IrOpcode::kInt32Constant) {
      return OpParameter<int32_t>(node);
    }
    DCHECK(node->opcode() == IrOpcode::kInt64Constant);
    return OpParameter<int64_t>(node);
  }

  // True for Float32 and Float64 constant nodes.
  bool IsFloatConstant(Node* node) {
    return (node->opcode() == IrOpcode::kFloat32Constant) ||
           (node->opcode() == IrOpcode::kFloat64Constant);
  }

  // Returns the value of a float constant node, widened to double.
  // Must only be called when IsFloatConstant(node) holds.
  double GetFloatConstantValue(Node* node) {
    if (node->opcode() == IrOpcode::kFloat32Constant) {
      return OpParameter<float>(node);
    }
    DCHECK_EQ(IrOpcode::kFloat64Constant, node->opcode());
    return OpParameter<double>(node);
  }

  // True if |node| is an integer constant encodable as an immediate under
  // |mode|.
  bool CanBeImmediate(Node* node, ImmediateMode mode) {
    return IsIntegerConstant(node) &&
           CanBeImmediate(GetIntegerConstantValue(node), mode);
  }

  // True if |value| is encodable as an immediate under |mode|.
  bool CanBeImmediate(int64_t value, ImmediateMode mode) {
    unsigned ignored;
    switch (mode) {
      case kLogical32Imm:
        // TODO(dcarney): some unencodable values can be handled by
        // switching instructions.
        return Assembler::IsImmLogical(static_cast<uint64_t>(value), 32,
                                       &ignored, &ignored, &ignored);
      case kLogical64Imm:
        return Assembler::IsImmLogical(static_cast<uint64_t>(value), 64,
                                       &ignored, &ignored, &ignored);
      case kArithmeticImm:
        return Assembler::IsImmAddSub(value);
      case kLoadStoreImm8:
        return IsLoadStoreImmediate(value, LSByte);
      case kLoadStoreImm16:
        return IsLoadStoreImmediate(value, LSHalfword);
      case kLoadStoreImm32:
        return IsLoadStoreImmediate(value, LSWord);
      case kLoadStoreImm64:
        return IsLoadStoreImmediate(value, LSDoubleWord);
      case kNoImmediate:
        return false;
      case kShift32Imm:  // Fall through.
      case kShift64Imm:
        // Shift operations only observe the bottom 5 or 6 bits of the value.
        // All possible shifts can be encoded by discarding bits which have no
        // effect.
        return true;
    }
    return false;
  }

  // True if |node| is a constant equal to log2 of the access size for |rep|,
  // i.e. a valid shift amount for a scaled-register addressing mode.
  bool CanBeLoadStoreShiftImmediate(Node* node, MachineRepresentation rep) {
    // TODO(arm64): Load and Store on 128 bit Q registers is not supported yet.
    DCHECK_NE(MachineRepresentation::kSimd128, rep);
    return IsIntegerConstant(node) &&
           (GetIntegerConstantValue(node) == ElementSizeLog2Of(rep));
  }

 private:
  // True if |value| fits a scaled or unscaled load/store offset for the given
  // access size.
  bool IsLoadStoreImmediate(int64_t value, LSDataSize size) {
    return Assembler::IsImmLSScaled(value, size) ||
           Assembler::IsImmLSUnscaled(value);
  }
};
137
138
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000139namespace {
140
141void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400142 Arm64OperandGenerator g(selector);
143 selector->Emit(opcode, g.DefineAsRegister(node),
144 g.UseRegister(node->InputAt(0)));
145}
146
147
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000148void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000149 Arm64OperandGenerator g(selector);
150 selector->Emit(opcode, g.DefineAsRegister(node),
151 g.UseRegister(node->InputAt(0)),
152 g.UseRegister(node->InputAt(1)));
153}
154
155
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000156void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
157 ImmediateMode operand_mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000158 Arm64OperandGenerator g(selector);
159 selector->Emit(opcode, g.DefineAsRegister(node),
160 g.UseRegister(node->InputAt(0)),
161 g.UseOperand(node->InputAt(1), operand_mode));
162}
163
164
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000165bool TryMatchAnyShift(InstructionSelector* selector, Node* node,
166 Node* input_node, InstructionCode* opcode, bool try_ror) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400167 Arm64OperandGenerator g(selector);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000168
169 if (!selector->CanCover(node, input_node)) return false;
170 if (input_node->InputCount() != 2) return false;
171 if (!g.IsIntegerConstant(input_node->InputAt(1))) return false;
172
173 switch (input_node->opcode()) {
174 case IrOpcode::kWord32Shl:
175 case IrOpcode::kWord64Shl:
176 *opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
177 return true;
178 case IrOpcode::kWord32Shr:
179 case IrOpcode::kWord64Shr:
180 *opcode |= AddressingModeField::encode(kMode_Operand2_R_LSR_I);
181 return true;
182 case IrOpcode::kWord32Sar:
183 case IrOpcode::kWord64Sar:
184 *opcode |= AddressingModeField::encode(kMode_Operand2_R_ASR_I);
185 return true;
186 case IrOpcode::kWord32Ror:
187 case IrOpcode::kWord64Ror:
188 if (try_ror) {
189 *opcode |= AddressingModeField::encode(kMode_Operand2_R_ROR_I);
190 return true;
191 }
192 return false;
193 default:
194 return false;
195 }
196}
197
198
// Tries to fold |right_node| into the instruction for |node| as an
// extended-register operand:
//   And(x, 0xff)/And(x, 0xffff)          -> UXTB/UXTH (zero extend)
//   Sar(Shl(x, 24), 24)/Sar(Shl(x, 16), 16) -> SXTB/SXTH (sign extend)
// On success, fills |*left_op|/|*right_op|, ORs the addressing mode into
// |*opcode| and returns true.
bool TryMatchAnyExtend(Arm64OperandGenerator* g, InstructionSelector* selector,
                       Node* node, Node* left_node, Node* right_node,
                       InstructionOperand* left_op,
                       InstructionOperand* right_op, InstructionCode* opcode) {
  if (!selector->CanCover(node, right_node)) return false;

  NodeMatcher nm(right_node);

  if (nm.IsWord32And()) {
    // Zero-extension: the And masks to the low byte or halfword.
    Int32BinopMatcher mright(right_node);
    if (mright.right().Is(0xff) || mright.right().Is(0xffff)) {
      int32_t mask = mright.right().Value();
      *left_op = g->UseRegister(left_node);
      *right_op = g->UseRegister(mright.left().node());
      *opcode |= AddressingModeField::encode(
          (mask == 0xff) ? kMode_Operand2_R_UXTB : kMode_Operand2_R_UXTH);
      return true;
    }
  } else if (nm.IsWord32Sar()) {
    // Sign-extension: Shl followed by Sar with matching amounts (16 for
    // halfword, 24 for byte). The inner Shl must also be covered.
    Int32BinopMatcher mright(right_node);
    if (selector->CanCover(mright.node(), mright.left().node()) &&
        mright.left().IsWord32Shl()) {
      Int32BinopMatcher mleft_of_right(mright.left().node());
      if ((mright.right().Is(16) && mleft_of_right.right().Is(16)) ||
          (mright.right().Is(24) && mleft_of_right.right().Is(24))) {
        int32_t shift = mright.right().Value();
        *left_op = g->UseRegister(left_node);
        *right_op = g->UseRegister(mleft_of_right.left().node());
        *opcode |= AddressingModeField::encode(
            (shift == 24) ? kMode_Operand2_R_SXTB : kMode_Operand2_R_SXTH);
        return true;
      }
    }
  }
  return false;
}
235
Ben Murdochc5610432016-08-08 18:44:38 +0100236bool TryMatchLoadStoreShift(Arm64OperandGenerator* g,
237 InstructionSelector* selector,
238 MachineRepresentation rep, Node* node, Node* index,
239 InstructionOperand* index_op,
240 InstructionOperand* shift_immediate_op) {
241 if (!selector->CanCover(node, index)) return false;
242 if (index->InputCount() != 2) return false;
243 Node* left = index->InputAt(0);
244 Node* right = index->InputAt(1);
245 switch (index->opcode()) {
246 case IrOpcode::kWord32Shl:
247 case IrOpcode::kWord64Shl:
248 if (!g->CanBeLoadStoreShiftImmediate(right, rep)) {
249 return false;
250 }
251 *index_op = g->UseRegister(left);
252 *shift_immediate_op = g->UseImmediate(right);
253 return true;
254 default:
255 return false;
256 }
257}
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400258
// Shared routine for multiple binary operations. Tries, in order: an
// immediate right operand; a commuted immediate (cmp/cmn only); an
// extended-register operand (add/sub family); a shifted-register operand;
// and finally plain registers. Flags use (branch/set/deoptimize) is driven
// by |cont|.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode,
                FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  Matcher m(node);
  InstructionOperand inputs[5];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;
  bool is_cmp = (opcode == kArm64Cmp32) || (opcode == kArm64Cmn32);

  // We can commute cmp by switching the inputs and commuting the flags
  // continuation.
  bool can_commute = m.HasProperty(Operator::kCommutative) || is_cmp;

  // The cmp and cmn instructions are encoded as sub or add with zero output
  // register, and therefore support the same operand modes.
  bool is_add_sub = m.IsInt32Add() || m.IsInt64Add() || m.IsInt32Sub() ||
                    m.IsInt64Sub() || is_cmp;

  Node* left_node = m.left().node();
  Node* right_node = m.right().node();

  if (g.CanBeImmediate(right_node, operand_mode)) {
    inputs[input_count++] = g.UseRegister(left_node);
    inputs[input_count++] = g.UseImmediate(right_node);
  } else if (is_cmp && g.CanBeImmediate(left_node, operand_mode)) {
    // Immediate on the left: commute the comparison.
    cont->Commute();
    inputs[input_count++] = g.UseRegister(right_node);
    inputs[input_count++] = g.UseImmediate(left_node);
  } else if (is_add_sub &&
             TryMatchAnyExtend(&g, selector, node, left_node, right_node,
                               &inputs[0], &inputs[1], &opcode)) {
    // TryMatchAnyExtend filled inputs[0] and inputs[1].
    input_count += 2;
  } else if (is_add_sub && can_commute &&
             TryMatchAnyExtend(&g, selector, node, right_node, left_node,
                               &inputs[0], &inputs[1], &opcode)) {
    if (is_cmp) cont->Commute();
    input_count += 2;
  } else if (TryMatchAnyShift(selector, node, right_node, &opcode,
                              !is_add_sub)) {
    // Fold the right operand's shift into the instruction; the shift's own
    // operands become the instruction inputs.
    Matcher m_shift(right_node);
    inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
    inputs[input_count++] = g.UseRegister(m_shift.left().node());
    inputs[input_count++] = g.UseImmediate(m_shift.right().node());
  } else if (can_commute && TryMatchAnyShift(selector, node, left_node, &opcode,
                                             !is_add_sub)) {
    if (is_cmp) cont->Commute();
    Matcher m_shift(left_node);
    inputs[input_count++] = g.UseRegisterOrImmediateZero(right_node);
    inputs[input_count++] = g.UseRegister(m_shift.left().node());
    inputs[input_count++] = g.UseImmediate(m_shift.right().node());
  } else {
    inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
    inputs[input_count++] = g.UseRegister(right_node);
  }

  // Branch continuations take the true/false target labels as extra inputs.
  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  // cmp/cmn produce only flags, so they have no value output.
  if (!is_cmp) {
    outputs[output_count++] = g.DefineAsRegister(node);
  }

  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK((output_count != 0) || is_cmp);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}
344
345
346// Shared routine for multiple binary operations.
347template <typename Matcher>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000348void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
349 ImmediateMode operand_mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000350 FlagsContinuation cont;
351 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
352}
353
354
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400355template <typename Matcher>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000356void VisitAddSub(InstructionSelector* selector, Node* node, ArchOpcode opcode,
357 ArchOpcode negate_opcode) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400358 Arm64OperandGenerator g(selector);
359 Matcher m(node);
360 if (m.right().HasValue() && (m.right().Value() < 0) &&
361 g.CanBeImmediate(-m.right().Value(), kArithmeticImm)) {
362 selector->Emit(negate_opcode, g.DefineAsRegister(node),
363 g.UseRegister(m.left().node()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000364 g.TempImmediate(static_cast<int32_t>(-m.right().Value())));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400365 } else {
366 VisitBinop<Matcher>(selector, node, opcode, kArithmeticImm);
367 }
368}
369
370
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000371// For multiplications by immediate of the form x * (2^k + 1), where k > 0,
372// return the value of k, otherwise return zero. This is used to reduce the
373// multiplication to addition with left shift: x + (x << k).
374template <typename Matcher>
375int32_t LeftShiftForReducedMultiply(Matcher* m) {
376 DCHECK(m->IsInt32Mul() || m->IsInt64Mul());
377 if (m->right().HasValue() && m->right().Value() >= 3) {
378 uint64_t value_minus_one = m->right().Value() - 1;
379 if (base::bits::IsPowerOfTwo64(value_minus_one)) {
380 return WhichPowerOf2_64(value_minus_one);
381 }
382 }
383 return 0;
384}
385
386} // namespace
387
388
// Selects a load. The opcode and the admissible immediate-offset mode are
// chosen from the loaded representation; the addressing mode is immediate
// offset (MRI), scaled register (R_LSL_I via TryMatchLoadStoreShift), or
// register-register (MRR).
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  MachineRepresentation rep = load_rep.representation();
  Arm64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  InstructionCode opcode = kArchNop;
  ImmediateMode immediate_mode = kNoImmediate;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  InstructionOperand outputs[1];
  switch (rep) {
    case MachineRepresentation::kFloat32:
      opcode = kArm64LdrS;
      immediate_mode = kLoadStoreImm32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kArm64LdrD;
      immediate_mode = kLoadStoreImm64;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kArm64Ldrsb : kArm64Ldrb;
      immediate_mode = kLoadStoreImm8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kArm64Ldrsh : kArm64Ldrh;
      immediate_mode = kLoadStoreImm16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kArm64LdrW;
      immediate_mode = kLoadStoreImm32;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kArm64Ldr;
      immediate_mode = kLoadStoreImm64;
      break;
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  outputs[0] = g.DefineAsRegister(node);
  inputs[0] = g.UseRegister(base);

  if (g.CanBeImmediate(index, immediate_mode)) {
    input_count = 2;
    inputs[1] = g.UseImmediate(index);
    opcode |= AddressingModeField::encode(kMode_MRI);
  } else if (TryMatchLoadStoreShift(&g, this, rep, node, index, &inputs[1],
                                    &inputs[2])) {
    // inputs[1] is the shifted index register, inputs[2] the shift amount.
    input_count = 3;
    opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
  } else {
    input_count = 2;
    inputs[1] = g.UseRegister(index);
    opcode |= AddressingModeField::encode(kMode_MRR);
  }

  Emit(opcode, arraysize(outputs), outputs, input_count, inputs);
}
452
453
// Selects a store. Stores that require a write barrier are lowered to
// kArchStoreWithWriteBarrier with unique registers (the barrier's
// out-of-line code reuses the operands); plain stores pick an opcode from
// the representation and an addressing mode like VisitLoad.
void InstructionSelector::VisitStore(Node* node) {
  Arm64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  // TODO(arm64): I guess this could be done in a better way.
  if (write_barrier_kind != kNoWriteBarrier) {
    // Write barriers only apply to tagged values.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    // OutOfLineRecordWrite uses the index in an arithmetic instruction, so we
    // must check kArithmeticImm as well as kLoadStoreImm64.
    if (g.CanBeImmediate(index, kArithmeticImm) &&
        g.CanBeImmediate(index, kLoadStoreImm64)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    InstructionOperand inputs[4];
    size_t input_count = 0;
    InstructionCode opcode = kArchNop;
    ImmediateMode immediate_mode = kNoImmediate;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kArm64StrS;
        immediate_mode = kLoadStoreImm32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kArm64StrD;
        immediate_mode = kLoadStoreImm64;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kArm64Strb;
        immediate_mode = kLoadStoreImm8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kArm64Strh;
        immediate_mode = kLoadStoreImm16;
        break;
      case MachineRepresentation::kWord32:
        opcode = kArm64StrW;
        immediate_mode = kLoadStoreImm32;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kArm64Str;
        immediate_mode = kLoadStoreImm64;
        break;
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }

    // A zero value can use the zero register directly.
    inputs[0] = g.UseRegisterOrImmediateZero(value);
    inputs[1] = g.UseRegister(base);

    if (g.CanBeImmediate(index, immediate_mode)) {
      input_count = 3;
      inputs[2] = g.UseImmediate(index);
      opcode |= AddressingModeField::encode(kMode_MRI);
    } else if (TryMatchLoadStoreShift(&g, this, rep, node, index, &inputs[2],
                                      &inputs[3])) {
      // inputs[2] is the shifted index register, inputs[3] the shift amount.
      input_count = 4;
      opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
    } else {
      input_count = 3;
      inputs[2] = g.UseRegister(index);
      opcode |= AddressingModeField::encode(kMode_MRR);
    }

    Emit(opcode, 0, nullptr, input_count, inputs);
  }
}
561
562
// Selects a checked (bounds-checked) load: inputs are buffer, offset and
// length; the opcode is chosen from the loaded representation.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  Arm64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // The length operand may be encoded as an arithmetic immediate
  // (kArithmeticImm) — presumably because the bounds check compares against
  // it; TODO confirm against the code generator.
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.UseOperand(length, kArithmeticImm));
}
599
600
// Selects a checked (bounds-checked) store: inputs are buffer, offset,
// length and value; the opcode is chosen from the stored representation.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  Arm64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // A zero value can use the zero register directly.
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.UseOperand(length, kArithmeticImm),
       g.UseRegisterOrImmediateZero(value));
}
639
640
// Shared routine for logical operations (And/Or/Xor, 32 and 64 bit).
// Selects the inverted-right-operand forms (bic/orn/eon) when one input is
// Xor(x, -1) and that Xor is only used here, and kArm64Not/kArm64Not32 for
// Xor(x, -1) itself; otherwise falls back to VisitBinop.
template <typename Matcher>
static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
                         ArchOpcode opcode, bool left_can_cover,
                         bool right_can_cover, ImmediateMode imm_mode) {
  Arm64OperandGenerator g(selector);

  // Map instruction to equivalent operation with inverted right input.
  ArchOpcode inv_opcode = opcode;
  switch (opcode) {
    case kArm64And32:
      inv_opcode = kArm64Bic32;
      break;
    case kArm64And:
      inv_opcode = kArm64Bic;
      break;
    case kArm64Or32:
      inv_opcode = kArm64Orn32;
      break;
    case kArm64Or:
      inv_opcode = kArm64Orn;
      break;
    case kArm64Eor32:
      inv_opcode = kArm64Eon32;
      break;
    case kArm64Eor:
      inv_opcode = kArm64Eon;
      break;
    default:
      UNREACHABLE();
  }

  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
    Matcher mleft(m->left().node());
    if (mleft.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->right().node()),
                     g.UseRegister(mleft.left().node()));
      return;
    }
  }

  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
      right_can_cover) {
    Matcher mright(m->right().node());
    if (mright.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->left().node()),
                     g.UseRegister(mright.left().node()));
      return;
    }
  }

  // Xor with -1 is bitwise negation.
  if (m->IsWord32Xor() && m->right().Is(-1)) {
    selector->Emit(kArm64Not32, g.DefineAsRegister(node),
                   g.UseRegister(m->left().node()));
  } else if (m->IsWord64Xor() && m->right().Is(-1)) {
    selector->Emit(kArm64Not, g.DefineAsRegister(node),
                   g.UseRegister(m->left().node()));
  } else {
    VisitBinop<Matcher>(selector, node, opcode, imm_mode);
  }
}
707
708
// Selects Word32And. For And(Shr(x, imm), mask) where |mask| is contiguous
// and occupies the least-significant bits, emits a single Ubfx32 (unsigned
// bitfield extract); otherwise delegates to VisitLogical.
void InstructionSelector::VisitWord32And(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32Shr() && CanCover(node, m.left().node()) &&
      m.right().HasValue()) {
    uint32_t mask = m.right().Value();
    uint32_t mask_width = base::bits::CountPopulation32(mask);
    uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
    if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
      // The mask must be contiguous, and occupy the least-significant bits.
      DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));

      // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
      // significant bits.
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().HasValue()) {
        // Any shift value can match; int32 shifts use `value % 32`.
        uint32_t lsb = mleft.right().Value() & 0x1f;

        // Ubfx cannot extract bits past the register size, however since
        // shifting the original value would have introduced some zeros we can
        // still use ubfx with a smaller mask and the remaining bits will be
        // zeros.
        if (lsb + mask_width > 32) mask_width = 32 - lsb;

        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(mleft.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
      // Other cases fall through to the normal And operation.
    }
  }
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kArm64And32, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kLogical32Imm);
}
747
748
// 64-bit counterpart of VisitWord32And: And(Shr(x, imm), mask) with a
// contiguous low-bit mask is selected as one Ubfx; otherwise selection falls
// through to the shared logical-op helper.
void InstructionSelector::VisitWord64And(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) &&
      m.right().HasValue()) {
    uint64_t mask = m.right().Value();
    uint64_t mask_width = base::bits::CountPopulation64(mask);
    uint64_t mask_msb = base::bits::CountLeadingZeros64(mask);
    if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
      // The mask must be contiguous, and occupy the least-significant bits.
      DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));

      // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
      // significant bits.
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().HasValue()) {
        // Any shift value can match; int64 shifts use `value % 64`.
        uint32_t lsb = static_cast<uint32_t>(mleft.right().Value() & 0x3f);

        // Ubfx cannot extract bits past the register size, however since
        // shifting the original value would have introduced some zeros we can
        // still use ubfx with a smaller mask and the remaining bits will be
        // zeros.
        if (lsb + mask_width > 64) mask_width = 64 - lsb;

        Emit(kArm64Ubfx, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(mleft.right().node(), lsb),
             g.TempImmediate(static_cast<int32_t>(mask_width)));
        return;
      }
      // Other cases fall through to the normal And operation.
    }
  }
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kArm64And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kLogical64Imm);
}
787
788
789void InstructionSelector::VisitWord32Or(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400790 Int32BinopMatcher m(node);
791 VisitLogical<Int32BinopMatcher>(
792 this, node, &m, kArm64Or32, CanCover(node, m.left().node()),
793 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000794}
795
796
797void InstructionSelector::VisitWord64Or(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400798 Int64BinopMatcher m(node);
799 VisitLogical<Int64BinopMatcher>(
800 this, node, &m, kArm64Or, CanCover(node, m.left().node()),
801 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000802}
803
804
805void InstructionSelector::VisitWord32Xor(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000806 Int32BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400807 VisitLogical<Int32BinopMatcher>(
808 this, node, &m, kArm64Eor32, CanCover(node, m.left().node()),
809 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000810}
811
812
813void InstructionSelector::VisitWord64Xor(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000814 Int64BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400815 VisitLogical<Int64BinopMatcher>(
816 this, node, &m, kArm64Eor, CanCover(node, m.left().node()),
817 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000818}
819
820
// Selects the instruction for a 32-bit left shift. Shl(And(x, mask), imm)
// with a contiguous low-bit mask becomes either a bare Lsl (when the shifted
// mask covers the top bit) or a single Ubfiz (bitfield insert in zeros).
void InstructionSelector::VisitWord32Shl(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && CanCover(node, m.left().node()) &&
      m.right().IsInRange(1, 31)) {
    Arm64OperandGenerator g(this);
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      uint32_t mask = mleft.right().Value();
      uint32_t mask_width = base::bits::CountPopulation32(mask);
      uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
        uint32_t shift = m.right().Value();
        // The mask is contiguous and occupies the least-significant bits.
        DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));
        DCHECK_NE(0u, shift);

        if ((shift + mask_width) >= 32) {
          // If the mask is contiguous and reaches or extends beyond the top
          // bit, only the shift is needed.
          Emit(kArm64Lsl32, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()));
          return;
        } else {
          // Select Ubfiz for Shl(And(x, mask), imm) where the mask is
          // contiguous, and the shift immediate non-zero.
          Emit(kArm64Ubfiz32, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()), g.TempImmediate(mask_width));
          return;
        }
      }
    }
  }
  VisitRRO(this, kArm64Lsl32, node, kShift32Imm);
}
856
857
// Selects the instruction for a 64-bit left shift. A 32->64-bit extension
// feeding a shift of 32-63 is elided, since the extended bits are shifted out.
void InstructionSelector::VisitWord64Shl(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
      m.right().IsInRange(32, 63)) {
    // There's no need to sign/zero-extend to 64-bit if we shift out the upper
    // 32 bits anyway.
    Emit(kArm64Lsl, g.DefineAsRegister(node),
         g.UseRegister(m.left().node()->InputAt(0)),
         g.UseImmediate(m.right().node()));
    return;
  }
  VisitRRO(this, kArm64Lsl, node, kShift64Imm);
}
872
873
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000874namespace {
875
876bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) {
877 Arm64OperandGenerator g(selector);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400878 Int32BinopMatcher m(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000879 if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
880 // Select Ubfx or Sbfx for (x << (K & 0x1f)) OP (K & 0x1f), where
881 // OP is >>> or >> and (K & 0x1f) != 0.
882 Int32BinopMatcher mleft(m.left().node());
883 if (mleft.right().HasValue() && m.right().HasValue() &&
884 (mleft.right().Value() & 0x1f) == (m.right().Value() & 0x1f)) {
885 DCHECK(m.IsWord32Shr() || m.IsWord32Sar());
886 ArchOpcode opcode = m.IsWord32Sar() ? kArm64Sbfx32 : kArm64Ubfx32;
887
888 int right_val = m.right().Value() & 0x1f;
889 DCHECK_NE(right_val, 0);
890
891 selector->Emit(opcode, g.DefineAsRegister(node),
892 g.UseRegister(mleft.left().node()), g.TempImmediate(0),
893 g.TempImmediate(32 - right_val));
894 return true;
895 }
896 }
897 return false;
898}
899
900} // namespace
901
902
// Selects the instruction for a 32-bit logical right shift. Recognizes three
// fusions: Shr(And(x, mask), imm) -> Ubfx, a shifted-left operand -> bitfield
// extract, and Shr(Uint32MulHigh(a, b), imm) -> Umull + one 64-bit Lsr.
void InstructionSelector::VisitWord32Shr(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x1f;
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      uint32_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation32(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_msb + mask_width + lsb) == 32) {
        Arm64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask));
        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(m.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  } else if (TryEmitBitfieldExtract32(this, node)) {
    return;
  }

  if (m.left().IsUint32MulHigh() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    // Combine this shift with the multiply and shift that would be generated
    // by Uint32MulHigh.
    Arm64OperandGenerator g(this);
    Node* left = m.left().node();
    int shift = m.right().Value() & 0x1f;
    InstructionOperand const smull_operand = g.TempRegister();
    Emit(kArm64Umull, smull_operand, g.UseRegister(left->InputAt(0)),
         g.UseRegister(left->InputAt(1)));
    // Shift the full 64-bit product right past the low word in one go.
    Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand,
         g.TempImmediate(32 + shift));
    return;
  }

  VisitRRO(this, kArm64Lsr32, node, kShift32Imm);
}
945
946
// Selects the instruction for a 64-bit logical right shift. Recognizes
// Shr(And(x, mask), imm) -> Ubfx when the surviving mask bits land in the
// least-significant positions; otherwise emits a plain Lsr.
void InstructionSelector::VisitWord64Shr(Node* node) {
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x3f;
    Int64BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      uint64_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation64(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros64(mask);
      if ((mask_msb + mask_width + lsb) == 64) {
        Arm64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros64(mask));
        Emit(kArm64Ubfx, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(m.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  }
  VisitRRO(this, kArm64Lsr, node, kShift64Imm);
}
971
972
// Selects the instruction for a 32-bit arithmetic right shift. Tries, in
// order: a bitfield extract for (x << K) >> K; fusing Sar(Int32MulHigh(a, b),
// imm) into Smull + 64-bit Asr; and fusing Sar(Add(Int32MulHigh(...), y), imm)
// into Smull + shifted-operand Add + Asr32.
void InstructionSelector::VisitWord32Sar(Node* node) {
  if (TryEmitBitfieldExtract32(this, node)) {
    return;
  }

  Int32BinopMatcher m(node);
  if (m.left().IsInt32MulHigh() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    // Combine this shift with the multiply and shift that would be generated
    // by Int32MulHigh.
    Arm64OperandGenerator g(this);
    Node* left = m.left().node();
    int shift = m.right().Value() & 0x1f;
    InstructionOperand const smull_operand = g.TempRegister();
    Emit(kArm64Smull, smull_operand, g.UseRegister(left->InputAt(0)),
         g.UseRegister(left->InputAt(1)));
    Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand,
         g.TempImmediate(32 + shift));
    return;
  }

  if (m.left().IsInt32Add() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    Node* add_node = m.left().node();
    Int32BinopMatcher madd_node(add_node);
    if (madd_node.left().IsInt32MulHigh() &&
        CanCover(add_node, madd_node.left().node())) {
      // Combine the shift that would be generated by Int32MulHigh with the add
      // on the left of this Sar operation. We do it here, as the result of the
      // add potentially has 33 bits, so we have to ensure the result is
      // truncated by being the input to this 32-bit Sar operation.
      Arm64OperandGenerator g(this);
      Node* mul_node = madd_node.left().node();

      InstructionOperand const smull_operand = g.TempRegister();
      Emit(kArm64Smull, smull_operand, g.UseRegister(mul_node->InputAt(0)),
           g.UseRegister(mul_node->InputAt(1)));

      // Add the other operand to the high word of the product using the
      // shifted-register form of Add (operand2 = smull_operand ASR 32).
      InstructionOperand const add_operand = g.TempRegister();
      Emit(kArm64Add | AddressingModeField::encode(kMode_Operand2_R_ASR_I),
           add_operand, g.UseRegister(add_node->InputAt(1)), smull_operand,
           g.TempImmediate(32));

      Emit(kArm64Asr32, g.DefineAsRegister(node), add_operand,
           g.UseImmediate(node->InputAt(1)));
      return;
    }
  }

  VisitRRO(this, kArm64Asr32, node, kShift32Imm);
}
1024
1025
// 64-bit arithmetic right shift; shift amount may be a register or a 0-63
// immediate (kShift64Imm).
void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitRRO(this, kArm64Asr, node, kShift64Imm);
}
1029
1030
// 32-bit rotate right; rotate amount may be a register or a 0-31 immediate.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kArm64Ror32, node, kShift32Imm);
}
1034
1035
// 64-bit rotate right; rotate amount may be a register or a 0-63 immediate.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kArm64Ror, node, kShift64Imm);
}
1039
1040
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001041void InstructionSelector::VisitWord64Clz(Node* node) {
1042 Arm64OperandGenerator g(this);
1043 Emit(kArm64Clz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
1044}
1045
1046
1047void InstructionSelector::VisitWord32Clz(Node* node) {
1048 Arm64OperandGenerator g(this);
1049 Emit(kArm64Clz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
1050}
1051
1052
// Never selected on arm64 — presumably the optional Word32Ctz operator is not
// advertised by this backend; NOTE(review): confirm the machine-operator flags.
void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
1054
1055
// Never selected on arm64 — presumably the optional Word64Ctz operator is not
// advertised by this backend; NOTE(review): confirm the machine-operator flags.
void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
1057
1058
Ben Murdoch097c5b22016-05-18 11:27:45 +01001059void InstructionSelector::VisitWord32ReverseBits(Node* node) {
1060 VisitRR(this, kArm64Rbit32, node);
1061}
1062
1063
// 64-bit bit reversal, selected as a single Rbit.
void InstructionSelector::VisitWord64ReverseBits(Node* node) {
  VisitRR(this, kArm64Rbit, node);
}
1067
1068
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001069void InstructionSelector::VisitWord32Popcnt(Node* node) { UNREACHABLE(); }
1070
1071
// Never selected on arm64 — presumably the optional Word64Popcnt operator is
// not advertised by this backend; NOTE(review): confirm the operator flags.
void InstructionSelector::VisitWord64Popcnt(Node* node) { UNREACHABLE(); }
1073
1074
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001075void InstructionSelector::VisitInt32Add(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001076 Arm64OperandGenerator g(this);
1077 Int32BinopMatcher m(node);
1078 // Select Madd(x, y, z) for Add(Mul(x, y), z).
1079 if (m.left().IsInt32Mul() && CanCover(node, m.left().node())) {
1080 Int32BinopMatcher mleft(m.left().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001081 // Check multiply can't be later reduced to addition with shift.
1082 if (LeftShiftForReducedMultiply(&mleft) == 0) {
1083 Emit(kArm64Madd32, g.DefineAsRegister(node),
1084 g.UseRegister(mleft.left().node()),
1085 g.UseRegister(mleft.right().node()),
1086 g.UseRegister(m.right().node()));
1087 return;
1088 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001089 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001090 // Select Madd(x, y, z) for Add(z, Mul(x, y)).
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001091 if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
1092 Int32BinopMatcher mright(m.right().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001093 // Check multiply can't be later reduced to addition with shift.
1094 if (LeftShiftForReducedMultiply(&mright) == 0) {
1095 Emit(kArm64Madd32, g.DefineAsRegister(node),
1096 g.UseRegister(mright.left().node()),
1097 g.UseRegister(mright.right().node()),
1098 g.UseRegister(m.left().node()));
1099 return;
1100 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001101 }
1102 VisitAddSub<Int32BinopMatcher>(this, node, kArm64Add32, kArm64Sub32);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001103}
1104
1105
// Selects the instruction for a 64-bit add. Mirrors VisitInt32Add: a
// single-use multiply on either side becomes Madd, unless the multiply is
// better strength-reduced to add-with-shift.
void InstructionSelector::VisitInt64Add(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  // Select Madd(x, y, z) for Add(Mul(x, y), z).
  if (m.left().IsInt64Mul() && CanCover(node, m.left().node())) {
    Int64BinopMatcher mleft(m.left().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mleft) == 0) {
      Emit(kArm64Madd, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }
  // Select Madd(x, y, z) for Add(z, Mul(x, y)).
  if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Madd, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }
  VisitAddSub<Int64BinopMatcher>(this, node, kArm64Add, kArm64Sub);
}
1135
1136
// Selects the instruction for a 32-bit subtract. A single-use multiply on the
// right is fused into Msub32 (a - x*y), unless the multiply would be
// strength-reduced to add-with-shift.
void InstructionSelector::VisitInt32Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);

  // Select Msub(x, y, a) for Sub(a, Mul(x, y)).
  if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Msub32, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }

  VisitAddSub<Int32BinopMatcher>(this, node, kArm64Sub32, kArm64Add32);
}
1156
1157
// Selects the instruction for a 64-bit subtract. Mirrors VisitInt32Sub:
// Sub(a, Mul(x, y)) with a single-use multiply becomes Msub.
void InstructionSelector::VisitInt64Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);

  // Select Msub(x, y, a) for Sub(a, Mul(x, y)).
  if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Msub, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }

  VisitAddSub<Int64BinopMatcher>(this, node, kArm64Sub, kArm64Add);
}
1177
1178
// Selects the instruction for a 32-bit multiply. Tries, in order: strength
// reduction of x * (2^k + 1) to add-with-shift, Mneg for a negated operand,
// then a plain Mul.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);

  // First, try to reduce the multiplication to addition with left shift.
  // x * (2^k + 1) -> x + (x << k)
  int32_t shift = LeftShiftForReducedMultiply(&m);
  if (shift > 0) {
    Emit(kArm64Add32 | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.UseRegister(m.left().node()), g.TempImmediate(shift));
    return;
  }

  if (m.left().IsInt32Sub() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());

    // Select Mneg(x, y) for Mul(Sub(0, x), y).
    if (mleft.left().Is(0)) {
      Emit(kArm64Mneg32, g.DefineAsRegister(node),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }

  if (m.right().IsInt32Sub() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());

    // Select Mneg(x, y) for Mul(x, Sub(0, y)).
    if (mright.left().Is(0)) {
      Emit(kArm64Mneg32, g.DefineAsRegister(node),
           g.UseRegister(m.left().node()),
           g.UseRegister(mright.right().node()));
      return;
    }
  }

  VisitRRR(this, kArm64Mul32, node);
}
1219
1220
// Selects the instruction for a 64-bit multiply. Mirrors VisitInt32Mul:
// add-with-shift strength reduction, then Mneg for a negated operand, then a
// plain Mul.
void InstructionSelector::VisitInt64Mul(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);

  // First, try to reduce the multiplication to addition with left shift.
  // x * (2^k + 1) -> x + (x << k)
  int32_t shift = LeftShiftForReducedMultiply(&m);
  if (shift > 0) {
    Emit(kArm64Add | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.UseRegister(m.left().node()), g.TempImmediate(shift));
    return;
  }

  if (m.left().IsInt64Sub() && CanCover(node, m.left().node())) {
    Int64BinopMatcher mleft(m.left().node());

    // Select Mneg(x, y) for Mul(Sub(0, x), y).
    if (mleft.left().Is(0)) {
      Emit(kArm64Mneg, g.DefineAsRegister(node),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }

  if (m.right().IsInt64Sub() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());

    // Select Mneg(x, y) for Mul(x, Sub(0, y)).
    if (mright.left().Is(0)) {
      Emit(kArm64Mneg, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(mright.right().node()));
      return;
    }
  }

  VisitRRR(this, kArm64Mul, node);
}
1260
1261
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001262void InstructionSelector::VisitInt32MulHigh(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001263 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001264 InstructionOperand const smull_operand = g.TempRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001265 Emit(kArm64Smull, smull_operand, g.UseRegister(node->InputAt(0)),
1266 g.UseRegister(node->InputAt(1)));
1267 Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1268}
1269
1270
1271void InstructionSelector::VisitUint32MulHigh(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001272 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001273 InstructionOperand const smull_operand = g.TempRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001274 Emit(kArm64Umull, smull_operand, g.UseRegister(node->InputAt(0)),
1275 g.UseRegister(node->InputAt(1)));
1276 Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1277}
1278
1279
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001280void InstructionSelector::VisitInt32Div(Node* node) {
1281 VisitRRR(this, kArm64Idiv32, node);
1282}
1283
1284
// Signed 64-bit division, register-register-register form.
void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kArm64Idiv, node);
}
1288
1289
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001290void InstructionSelector::VisitUint32Div(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001291 VisitRRR(this, kArm64Udiv32, node);
1292}
1293
1294
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001295void InstructionSelector::VisitUint64Div(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001296 VisitRRR(this, kArm64Udiv, node);
1297}
1298
1299
// Signed 32-bit modulus (pseudo-instruction; arm64 has no direct remainder).
void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kArm64Imod32, node);
}
1303
1304
// Signed 64-bit modulus (pseudo-instruction; arm64 has no direct remainder).
void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kArm64Imod, node);
}
1308
1309
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001310void InstructionSelector::VisitUint32Mod(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001311 VisitRRR(this, kArm64Umod32, node);
1312}
1313
1314
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001315void InstructionSelector::VisitUint64Mod(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001316 VisitRRR(this, kArm64Umod, node);
1317}
1318
1319
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001320void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001321 VisitRR(this, kArm64Float32ToFloat64, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001322}
1323
1324
Ben Murdoch097c5b22016-05-18 11:27:45 +01001325void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
1326 VisitRR(this, kArm64Int32ToFloat32, node);
1327}
1328
1329
// Unsigned int32 -> float32 conversion.
void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kArm64Uint32ToFloat32, node);
}
1333
1334
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001335void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336 VisitRR(this, kArm64Int32ToFloat64, node);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001337}
1338
1339
// Unsigned int32 -> float64 conversion.
void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kArm64Uint32ToFloat64, node);
}
1343
1344
Ben Murdoch097c5b22016-05-18 11:27:45 +01001345void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
1346 VisitRR(this, kArm64Float32ToInt32, node);
1347}
1348
1349
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001350void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001351 VisitRR(this, kArm64Float64ToInt32, node);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001352}
1353
1354
Ben Murdoch097c5b22016-05-18 11:27:45 +01001355void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
1356 VisitRR(this, kArm64Float32ToUint32, node);
1357}
1358
1359
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001360void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001361 VisitRR(this, kArm64Float64ToUint32, node);
1362}
1363
Ben Murdochda12d292016-06-02 14:46:10 +01001364void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
1365 VisitRR(this, kArm64Float64ToUint32, node);
1366}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001367
// Checked float32 -> int64 truncation. Output 0 is the truncated value; if
// the graph uses projection 1, it is defined as a second (success) output.
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  Arm64OperandGenerator g(this);

  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  // Projection 1, when present, receives the success flag.
  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kArm64Float32ToInt64, output_count, outputs, 1, inputs);
}
1383
1384
1385void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1386 Arm64OperandGenerator g(this);
1387
1388 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1389 InstructionOperand outputs[2];
1390 size_t output_count = 0;
1391 outputs[output_count++] = g.DefineAsRegister(node);
1392
1393 Node* success_output = NodeProperties::FindProjection(node, 1);
1394 if (success_output) {
1395 outputs[output_count++] = g.DefineAsRegister(success_output);
1396 }
1397
1398 Emit(kArm64Float64ToInt64, output_count, outputs, 1, inputs);
1399}
1400
1401
1402void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1403 Arm64OperandGenerator g(this);
1404
1405 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1406 InstructionOperand outputs[2];
1407 size_t output_count = 0;
1408 outputs[output_count++] = g.DefineAsRegister(node);
1409
1410 Node* success_output = NodeProperties::FindProjection(node, 1);
1411 if (success_output) {
1412 outputs[output_count++] = g.DefineAsRegister(success_output);
1413 }
1414
1415 Emit(kArm64Float32ToUint64, output_count, outputs, 1, inputs);
1416}
1417
1418
1419void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1420 Arm64OperandGenerator g(this);
1421
1422 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1423 InstructionOperand outputs[2];
1424 size_t output_count = 0;
1425 outputs[output_count++] = g.DefineAsRegister(node);
1426
1427 Node* success_output = NodeProperties::FindProjection(node, 1);
1428 if (success_output) {
1429 outputs[output_count++] = g.DefineAsRegister(success_output);
1430 }
1431
1432 Emit(kArm64Float64ToUint64, output_count, outputs, 1, inputs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001433}
1434
1435
// int32 -> int64 sign extension (sxtw).
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  VisitRR(this, kArm64Sxtw, node);
}
1439
1440
// uint32 -> uint64 zero extension. When the input is a 32-bit operation (or a
// narrow load) whose result already clears the upper 32 bits of the X
// register, the extension is selected as a no-op (kArchNop aliasing the
// input); otherwise an explicit 32-bit move performs the zero extension.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  Arm64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32AddWithOverflow:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32SubWithOverflow:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh: {
      // 32-bit operations will write their result in a W register (implicitly
      // clearing the top 32-bit of the corresponding X register) so the
      // zero-extension is a no-op.
      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      return;
    }
    case IrOpcode::kLoad: {
      // As for the operations above, a 32-bit load will implicitly clear the
      // top 32 bits of the destination register.
      LoadRepresentation load_rep = LoadRepresentationOf(value->op());
      switch (load_rep.representation()) {
        case MachineRepresentation::kWord8:
        case MachineRepresentation::kWord16:
        case MachineRepresentation::kWord32:
          Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
          return;
        default:
          break;
      }
      // Wider loads fall through to the explicit move below.
    }
    default:
      break;
  }
  Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(value));
}
1493
1494
// Narrow a float64 to float32 via kArm64Float64ToFloat32.
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kArm64Float64ToFloat32, node);
}
1498
// JS-style double-to-int32 truncation; uses the architecture-independent
// kArchTruncateDoubleToI lowering.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, kArchTruncateDoubleToI, node);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001502
// Convert a float64 to int32 via kArm64Float64ToInt32.
void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ToInt32, node);
}
1506
1507
// Lower TruncateInt64ToInt32 to a 32-bit move, but combine with a covered
// 64-bit right shift when possible: for Shr by 32..63, and for Sar by
// exactly 32, a single LSR produces the same low 32 bits (only those bits
// survive the truncation, so the sign/zero fill difference is irrelevant).
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  Arm64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  // InputCount() >= 2 guards the binop matcher below against non-binop inputs.
  if (CanCover(node, value) && value->InputCount() >= 2) {
    Int64BinopMatcher m(value);
    if ((m.IsWord64Sar() && m.right().HasValue() &&
         (m.right().Value() == 32)) ||
        (m.IsWord64Shr() && m.right().IsInRange(32, 63))) {
      Emit(kArm64Lsr, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseImmediate(m.right().node()));
      return;
    }
  }

  Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
1524
1525
// Convert a signed 64-bit integer to float32.
void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kArm64Int64ToFloat32, node);
}
1529
1530
// Convert a signed 64-bit integer to float64.
void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kArm64Int64ToFloat64, node);
}
1534
1535
// Convert an unsigned 64-bit integer to float32.
void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kArm64Uint64ToFloat32, node);
}
1539
1540
// Convert an unsigned 64-bit integer to float64.
void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kArm64Uint64ToFloat64, node);
}
1544
1545
// Reinterpret float32 bits as int32; reuses the "extract low word" opcode
// since only the low 32 bits of the FP register are meaningful here.
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ExtractLowWord32, node);
}
1549
1550
// Reinterpret float64 bits as int64 (FP -> general register move).
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kArm64U64MoveFloat64, node);
}
1554
1555
// Reinterpret int32 bits as float32; shares the 64-bit general->FP move
// opcode (the upper bits of the FP register are don't-care for float32).
void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kArm64Float64MoveU64, node);
}
1559
1560
// Reinterpret int64 bits as float64 (general -> FP register move).
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kArm64Float64MoveU64, node);
}
1564
1565
// float32 addition.
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kArm64Float32Add, node);
}
1569
1570
// float64 addition.
void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitRRR(this, kArm64Float64Add, node);
}
1574
1575
// float32 subtraction.
void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kArm64Float32Sub, node);
}
1579
// NaN-preserving float32 subtraction; on arm64 the plain fsub already
// preserves NaN payloads, so the same opcode is used.
void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
  VisitRRR(this, kArm64Float32Sub, node);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001583
// float64 subtraction, with two strength reductions when the left operand is
// the constant -0.0:
//   -0.0 - RoundDown(-0.0 - x)  =>  RoundUp(x)   (negated floor is ceil)
//   -0.0 - x                    =>  Neg(x)
// Both rewrites require the intermediate nodes to be covered (used only
// here), otherwise the generic three-register subtract is emitted.
void InstructionSelector::VisitFloat64Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
               g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    Emit(kArm64Float64Neg, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  VisitRRR(this, kArm64Float64Sub, node);
}
1606
// NaN-preserving float64 subtraction; maps to the plain fsub opcode and
// deliberately skips the -0.0 strength reductions done by VisitFloat64Sub.
void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
  VisitRRR(this, kArm64Float64Sub, node);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610
// float32 multiplication.
void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kArm64Float32Mul, node);
}
1614
1615
// float64 multiplication.
void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kArm64Float64Mul, node);
}
1619
1620
// float32 division.
void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kArm64Float32Div, node);
}
1624
1625
// float64 division.
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kArm64Float64Div, node);
}
1629
1630
// float64 modulus has no arm64 instruction; it is lowered to a call with
// the operands pinned to d0/d1 and the result in d0 (MarkAsCall tells the
// register allocator about the call's clobbers).
void InstructionSelector::VisitFloat64Mod(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Float64Mod, g.DefineAsFixed(node, d0),
       g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
}
1637
1638
// float32 maximum.
void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRR(this, kArm64Float32Max, node);
}
1642
1643
// float64 maximum.
void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRR(this, kArm64Float64Max, node);
}
1647
1648
// float32 minimum.
void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRR(this, kArm64Float32Min, node);
}
1652
1653
// float64 minimum.
void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRR(this, kArm64Float64Min, node);
}
1657
1658
// float32 absolute value.
void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kArm64Float32Abs, node);
}
1662
1663
// float64 absolute value.
void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kArm64Float64Abs, node);
}
1667
1668
// float32 square root.
void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kArm64Float32Sqrt, node);
}
1672
1673
// float64 square root.
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kArm64Float64Sqrt, node);
}
1677
1678
// float32 round toward negative infinity (floor).
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kArm64Float32RoundDown, node);
}
1682
1683
// float64 round toward negative infinity (floor).
void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kArm64Float64RoundDown, node);
}
1687
1688
// float32 round toward positive infinity (ceil).
void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kArm64Float32RoundUp, node);
}
1692
1693
// float64 round toward positive infinity (ceil).
void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kArm64Float64RoundUp, node);
}
1697
1698
// float32 round toward zero (truncate).
void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kArm64Float32RoundTruncate, node);
}
1702
1703
// float64 round toward zero (truncate).
void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kArm64Float64RoundTruncate, node);
}
1707
1708
// float64 round to nearest, ties away from zero.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kArm64Float64RoundTiesAway, node);
}
1712
1713
// float32 round to nearest, ties to even.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, kArm64Float32RoundTiesEven, node);
}
1717
1718
// float64 round to nearest, ties to even.
void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  VisitRR(this, kArm64Float64RoundTiesEven, node);
}
1722
1723
// Push call arguments onto the stack ahead of a call: first bump the stack
// pointer by the number of argument slots (claim), then store each argument
// into its slot (poke), from the highest slot down to slot 0.  Which stack
// pointer is targeted (CSP for native-stack callees, JSSP otherwise) is
// decided by the callee's descriptor; a claim is emitted even for zero
// arguments when caller and callee disagree on which stack they use.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  Arm64OperandGenerator g(this);

  bool from_native_stack = linkage()->GetIncomingDescriptor()->UseNativeStack();
  bool to_native_stack = descriptor->UseNativeStack();

  bool always_claim = to_native_stack != from_native_stack;

  int claim_count = static_cast<int>(arguments->size());
  int slot = claim_count - 1;
  // Bump the stack pointer(s).
  if (claim_count > 0 || always_claim) {
    // TODO(titzer): claim and poke probably take small immediates.
    // TODO(titzer): it would be better to bump the csp here only
    // and emit paired stores with increment for non c frames.
    ArchOpcode claim = to_native_stack ? kArm64ClaimCSP : kArm64ClaimJSSP;
    // Claim(0) isn't a nop if there is a mismatch between CSP and JSSP.
    Emit(claim, g.NoOutput(), g.TempImmediate(claim_count));
  }

  // Poke the arguments into the stack.
  ArchOpcode poke = to_native_stack ? kArm64PokeCSP : kArm64PokeJSSP;
  while (slot >= 0) {
    Emit(poke, g.NoOutput(), g.UseRegister((*arguments)[slot].node()),
         g.TempImmediate(slot));
    slot--;
    // TODO(ahaas): Poke arguments in pairs if two subsequent arguments have the
    // same type.
    // Emit(kArm64PokePair, g.NoOutput(), g.UseRegister((*arguments)[slot]),
    //      g.UseRegister((*arguments)[slot - 1]), g.TempImmediate(slot));
    // slot -= 2;
  }
}
1759
1760
// Tail-call targets are always materialized in a register on arm64.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }
1762
// Number of temporary registers reserved for a tail call out of a JSFunction.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001764
1765namespace {
1766
// Shared routine for multiple compare operations.  Emits the (already
// chosen) compare instruction and materializes the flags result according
// to the continuation: as a two-way branch, as a deoptimization check, or
// as a boolean register ("set").
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  // Fold the continuation's condition into the instruction code.
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}
1784
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001785
1786// Shared routine for multiple word compare operations.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001787void VisitWordCompare(InstructionSelector* selector, Node* node,
1788 InstructionCode opcode, FlagsContinuation* cont,
1789 bool commutative, ImmediateMode immediate_mode) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001790 Arm64OperandGenerator g(selector);
1791 Node* left = node->InputAt(0);
1792 Node* right = node->InputAt(1);
1793
1794 // Match immediates on left or right side of comparison.
1795 if (g.CanBeImmediate(right, immediate_mode)) {
1796 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1797 cont);
1798 } else if (g.CanBeImmediate(left, immediate_mode)) {
1799 if (!commutative) cont->Commute();
1800 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1801 cont);
1802 } else {
1803 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1804 cont);
1805 }
1806}
1807
1808
// 32-bit word compare.  If the right input is a negation (0 - x), rewrites
// cmp(a, 0 - x) into cmn(a, x) by mutating the comparison node in place.
// NOTE: this mutates the graph (ReplaceInput) — safe only because selection
// visits each node once.
void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  Int32BinopMatcher m(node);
  ArchOpcode opcode = kArm64Cmp32;

  // Select negated compare for comparisons with negated right input.
  if (m.right().IsInt32Sub()) {
    Node* sub = m.right().node();
    Int32BinopMatcher msub(sub);
    if (msub.left().Is(0)) {
      bool can_cover = selector->CanCover(node, sub);
      node->ReplaceInput(1, msub.right().node());
      // Even if the comparison node covers the subtraction, after the input
      // replacement above, the node still won't cover the input to the
      // subtraction; the subtraction still uses it.
      // In order to get shifted operations to work, we must remove the rhs
      // input to the subtraction, as TryMatchAnyShift requires this node to
      // cover the input shift. We do this by setting it to the lhs input,
      // as we know it's zero, and the result of the subtraction isn't used by
      // any other node.
      if (can_cover) sub->ReplaceInput(1, msub.left().node());
      opcode = kArm64Cmn32;
    }
  }
  VisitBinop<Int32BinopMatcher>(selector, node, opcode, kArithmeticImm, cont);
}
1835
1836
// Shared helper: test a value against itself (e.g. tst reg, reg) to set the
// flags for the given continuation.
void VisitWordTest(InstructionSelector* selector, Node* node,
                   InstructionCode opcode, FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.UseRegister(node), g.UseRegister(node),
               cont);
}
1843
1844
// 32-bit self-test (tst w, w).
void VisitWord32Test(InstructionSelector* selector, Node* node,
                     FlagsContinuation* cont) {
  VisitWordTest(selector, node, kArm64Tst32, cont);
}
1849
1850
// 64-bit self-test (tst x, x).
void VisitWord64Test(InstructionSelector* selector, Node* node,
                     FlagsContinuation* cont) {
  VisitWordTest(selector, node, kArm64Tst, cont);
}
1855
1856
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001857// Shared routine for multiple float32 compare operations.
1858void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1859 FlagsContinuation* cont) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001860 Arm64OperandGenerator g(selector);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001861 Float32BinopMatcher m(node);
1862 if (m.right().Is(0.0f)) {
1863 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
1864 g.UseImmediate(m.right().node()), cont);
1865 } else if (m.left().Is(0.0f)) {
1866 cont->Commute();
1867 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.right().node()),
1868 g.UseImmediate(m.left().node()), cont);
1869 } else {
1870 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
1871 g.UseRegister(m.right().node()), cont);
1872 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001873}
1874
1875
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001876// Shared routine for multiple float64 compare operations.
1877void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1878 FlagsContinuation* cont) {
1879 Arm64OperandGenerator g(selector);
1880 Float64BinopMatcher m(node);
1881 if (m.right().Is(0.0)) {
1882 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
1883 g.UseImmediate(m.right().node()), cont);
1884 } else if (m.left().Is(0.0)) {
1885 cont->Commute();
1886 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.right().node()),
1887 g.UseImmediate(m.left().node()), cont);
1888 } else {
1889 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
1890 g.UseRegister(m.right().node()), cont);
1891 }
1892}
1893
// Shared routine for a comparison of `value` against zero.  Tries to fuse
// the flags-setting work into the node that produced `value` (a compare,
// the overflow projection of an <Op>WithOverflow, an add/sub, or a
// bitwise-and), rewriting the continuation's condition to match.  If no
// fusion applies, falls back to an explicit compare-and-branch against zero
// (for branches) or a self-test (for deoptimization).
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  // The loop only repeats via `continue` below: each iteration may peel one
  // Word32Equal(x, 0) wrapper off `value`, negating the continuation.
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kArm64Add32, kArithmeticImm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kArm64Sub32, kArithmeticImm, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Add,
                                                     kArithmeticImm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Sub,
                                                     kArithmeticImm, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Add:
        return VisitWordCompare(selector, value, kArm64Cmn32, cont, true,
                                kArithmeticImm);
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And: {
        Int32BinopMatcher m(value);
        if (cont->IsBranch() && m.right().HasValue() &&
            (base::bits::CountPopulation32(m.right().Value()) == 1)) {
          // If the mask has only one bit set, we can use tbz/tbnz.
          DCHECK((cont->condition() == kEqual) ||
                 (cont->condition() == kNotEqual));
          selector->Emit(
              cont->Encode(kArm64TestAndBranch32), g.NoOutput(),
              g.UseRegister(m.left().node()),
              g.TempImmediate(
                  base::bits::CountTrailingZeros32(m.right().Value())),
              g.Label(cont->true_block()), g.Label(cont->false_block()));
          return;
        }
        return VisitWordCompare(selector, value, kArm64Tst32, cont, true,
                                kLogical32Imm);
      }
      case IrOpcode::kWord64And: {
        Int64BinopMatcher m(value);
        if (cont->IsBranch() && m.right().HasValue() &&
            (base::bits::CountPopulation64(m.right().Value()) == 1)) {
          // If the mask has only one bit set, we can use tbz/tbnz.
          DCHECK((cont->condition() == kEqual) ||
                 (cont->condition() == kNotEqual));
          selector->Emit(
              cont->Encode(kArm64TestAndBranch), g.NoOutput(),
              g.UseRegister(m.left().node()),
              g.TempImmediate(
                  base::bits::CountTrailingZeros64(m.right().Value())),
              g.Label(cont->true_block()), g.Label(cont->false_block()));
          return;
        }
        return VisitWordCompare(selector, value, kArm64Tst, cont, true,
                                kLogical64Imm);
      }
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, compare against 0 and branch.
  // NOTE(review): the fallback uses 32-bit encodings (kArm64CompareAndBranch32
  // / kArm64Tst32) — presumably `value` is always a 32-bit boolean-like value
  // here; confirm against callers.
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kArm64CompareAndBranch32), g.NoOutput(),
                   g.UseRegister(value), g.Label(cont->true_block()),
                   g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsDeoptimize());
    selector->EmitDeoptimize(cont->Encode(kArm64Tst32), g.NoOutput(),
                             g.UseRegister(value), g.UseRegister(value),
                             cont->frame_state());
  }
}
2054
Ben Murdochda12d292016-06-02 14:46:10 +01002055} // namespace
2056
// A branch is a "value != 0" test on its condition input, combined with the
// producing node where possible.
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
2062
// Deoptimize when the condition (input 0) is non-zero; input 1 is the frame
// state for the deopt.
void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}
2068
// Deoptimize when the condition (input 0) is zero; input 1 is the frame
// state for the deopt.
void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002074
// Lower a switch to either a jump table (ArchTableSwitch) or a chain of
// conditional jumps (ArchLookupSwitch), picking whichever the simple
// space/time cost model below deems cheaper.  The min_value guard prevents
// int32 overflow in the index-rebasing subtraction.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  Arm64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      // Rebase the value so the table is indexed from zero.
      index_operand = g.TempRegister();
      Emit(kArm64Sub32, index_operand, value_operand,
           g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
2101
2102
// 32-bit equality.  Comparisons against zero are fused with the node that
// produced the value when covered (add -> cmn, sub -> cmp, and -> tst), and
// Word32Equal(Word32Equal(x, y), 0) is rewritten in place to compare x and
// y with a negated condition.
void InstructionSelector::VisitWord32Equal(Node* const node) {
  Node* const user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt32Add:
          return VisitWordCompare(this, value, kArm64Cmn32, &cont, true,
                                  kArithmeticImm);
        case IrOpcode::kInt32Sub:
          return VisitWordCompare(this, value, kArm64Cmp32, &cont, false,
                                  kArithmeticImm);
        case IrOpcode::kWord32And:
          return VisitWordCompare(this, value, kArm64Tst32, &cont, true,
                                  kLogical32Imm);
        case IrOpcode::kWord32Equal: {
          // Word32Equal(Word32Equal(x, y), 0) => Word32Compare(x, y, ne).
          Int32BinopMatcher mequal(value);
          node->ReplaceInput(0, mequal.left().node());
          node->ReplaceInput(1, mequal.right().node());
          cont.Negate();
          return VisitWord32Compare(this, node, &cont);
        }
        default:
          break;
      }
      return VisitWord32Test(this, value, &cont);
    }
  }
  VisitWord32Compare(this, node, &cont);
}
2136
2137
// Signed 32-bit less-than, materialized as a boolean.
void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}
2142
2143
// Signed 32-bit less-than-or-equal, materialized as a boolean.
void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}
2149
2150
// Unsigned 32-bit less-than, materialized as a boolean.
void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}
2155
2156
// Unsigned 32-bit less-than-or-equal, materialized as a boolean.
void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}
2162
2163
// 64-bit equality.  A covered comparison against zero fuses a Word64And
// input into a tst; any other covered input becomes a self-test.  Otherwise
// a full 64-bit compare is emitted.
void InstructionSelector::VisitWord64Equal(Node* const node) {
  Node* const user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kWord64And:
          return VisitWordCompare(this, value, kArm64Tst, &cont, true,
                                  kLogical64Imm);
        default:
          break;
      }
      return VisitWord64Test(this, value, &cont);
    }
  }
  VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
}
2183
2184
2185void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002186 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002187 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002188 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
2189 kArithmeticImm, &cont);
2190 }
2191 FlagsContinuation cont;
2192 VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, &cont);
2193}
2194
2195
2196void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002197 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002198 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002199 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
2200 kArithmeticImm, &cont);
2201 }
2202 FlagsContinuation cont;
2203 VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, &cont);
2204}
2205
2206
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002207void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
2208 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002209 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002210 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm,
2211 &cont);
2212 }
2213 FlagsContinuation cont;
2214 VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm, &cont);
2215}
2216
2217
2218void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
2219 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002220 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002221 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm,
2222 &cont);
2223 }
2224 FlagsContinuation cont;
2225 VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm, &cont);
2226}
2227
2228
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002229void InstructionSelector::VisitInt64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002230 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002231 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2232}
2233
2234
2235void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002236 FlagsContinuation cont =
2237 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002238 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2239}
2240
2241
2242void InstructionSelector::VisitUint64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002243 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002244 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2245}
2246
2247
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002248void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002249 FlagsContinuation cont =
2250 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002251 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2252}
2253
2254
2255void InstructionSelector::VisitFloat32Equal(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002256 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002257 VisitFloat32Compare(this, node, &cont);
2258}
2259
2260
2261void InstructionSelector::VisitFloat32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002262 FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002263 VisitFloat32Compare(this, node, &cont);
2264}
2265
2266
2267void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002268 FlagsContinuation cont =
2269 FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002270 VisitFloat32Compare(this, node, &cont);
2271}
2272
2273
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002274void InstructionSelector::VisitFloat64Equal(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002275 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002276 VisitFloat64Compare(this, node, &cont);
2277}
2278
2279
2280void InstructionSelector::VisitFloat64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002281 FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002282 VisitFloat64Compare(this, node, &cont);
2283}
2284
2285
2286void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002287 FlagsContinuation cont =
2288 FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002289 VisitFloat64Compare(this, node, &cont);
2290}
2291
2292
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002293void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
2294 Arm64OperandGenerator g(this);
2295 Emit(kArm64Float64ExtractLowWord32, g.DefineAsRegister(node),
2296 g.UseRegister(node->InputAt(0)));
2297}
2298
2299
2300void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
2301 Arm64OperandGenerator g(this);
2302 Emit(kArm64Float64ExtractHighWord32, g.DefineAsRegister(node),
2303 g.UseRegister(node->InputAt(0)));
2304}
2305
2306
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  // Replaces the low 32 bits of the double |left| with the word |right|.
  Arm64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    // Fuse InsertHighWord32(_, hi) followed by InsertLowWord32(_, lo): both
    // 32-bit halves are then fully specified, so the original double is not
    // needed at all. Build the raw 64-bit value with a bitfield insert of
    // |hi| into bits [32, 64) of |lo|, then move it into an FP register.
    Node* right_of_left = left->InputAt(1);
    // NOTE: the Bfi result is constrained to |right|'s own register
    // (DefineSameAsFirst), i.e. the insert happens in place on |lo|.
    Emit(kArm64Bfi, g.DefineSameAsFirst(right), g.UseRegister(right),
         g.UseRegister(right_of_left), g.TempImmediate(32),
         g.TempImmediate(32));
    Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(right));
    return;
  }
  // Generic case: insert |right| into the low word of |left|.
  Emit(kArm64Float64InsertLowWord32, g.DefineAsRegister(node),
       g.UseRegister(left), g.UseRegister(right));
}
2323
2324
void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  // Replaces the high 32 bits of the double |left| with the word |right|.
  Arm64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    // Fuse InsertLowWord32(_, lo) followed by InsertHighWord32(_, hi): both
    // 32-bit halves are then fully specified, so the original double is not
    // needed at all. Build the raw 64-bit value with a bitfield insert of
    // |right| into bits [32, 64) of |lo|, then move it into an FP register.
    Node* right_of_left = left->InputAt(1);
    // NOTE: the Bfi result is constrained to the register holding |lo|
    // (DefineSameAsFirst on |left|), i.e. the insert happens in place.
    Emit(kArm64Bfi, g.DefineSameAsFirst(left), g.UseRegister(right_of_left),
         g.UseRegister(right), g.TempImmediate(32), g.TempImmediate(32));
    Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(left));
    return;
  }
  // Generic case: insert |right| into the high word of |left|.
  Emit(kArm64Float64InsertHighWord32, g.DefineAsRegister(node),
       g.UseRegister(left), g.UseRegister(right));
}
2340
Ben Murdochc5610432016-08-08 18:44:38 +01002341void InstructionSelector::VisitAtomicLoad(Node* node) {
2342 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
2343 Arm64OperandGenerator g(this);
2344 Node* base = node->InputAt(0);
2345 Node* index = node->InputAt(1);
2346 ArchOpcode opcode = kArchNop;
2347 switch (load_rep.representation()) {
2348 case MachineRepresentation::kWord8:
2349 opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
2350 break;
2351 case MachineRepresentation::kWord16:
2352 opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
2353 break;
2354 case MachineRepresentation::kWord32:
2355 opcode = kAtomicLoadWord32;
2356 break;
2357 default:
2358 UNREACHABLE();
2359 return;
2360 }
2361 Emit(opcode | AddressingModeField::encode(kMode_MRR),
2362 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
2363}
2364
2365void InstructionSelector::VisitAtomicStore(Node* node) {
2366 MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
2367 Arm64OperandGenerator g(this);
2368 Node* base = node->InputAt(0);
2369 Node* index = node->InputAt(1);
2370 Node* value = node->InputAt(2);
2371 ArchOpcode opcode = kArchNop;
2372 switch (rep) {
2373 case MachineRepresentation::kWord8:
2374 opcode = kAtomicStoreWord8;
2375 break;
2376 case MachineRepresentation::kWord16:
2377 opcode = kAtomicStoreWord16;
2378 break;
2379 case MachineRepresentation::kWord32:
2380 opcode = kAtomicStoreWord32;
2381 break;
2382 default:
2383 UNREACHABLE();
2384 return;
2385 }
2386
2387 AddressingMode addressing_mode = kMode_MRR;
2388 InstructionOperand inputs[3];
2389 size_t input_count = 0;
2390 inputs[input_count++] = g.UseUniqueRegister(base);
2391 inputs[input_count++] = g.UseUniqueRegister(index);
2392 inputs[input_count++] = g.UseUniqueRegister(value);
2393 InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
2394 Emit(code, 0, nullptr, input_count, inputs);
2395}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002396
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002397// static
2398MachineOperatorBuilder::Flags
2399InstructionSelector::SupportedMachineOperatorFlags() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002400 return MachineOperatorBuilder::kFloat32Max |
2401 MachineOperatorBuilder::kFloat32Min |
2402 MachineOperatorBuilder::kFloat32RoundDown |
2403 MachineOperatorBuilder::kFloat64Max |
2404 MachineOperatorBuilder::kFloat64Min |
2405 MachineOperatorBuilder::kFloat64RoundDown |
2406 MachineOperatorBuilder::kFloat32RoundUp |
2407 MachineOperatorBuilder::kFloat64RoundUp |
2408 MachineOperatorBuilder::kFloat32RoundTruncate |
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002409 MachineOperatorBuilder::kFloat64RoundTruncate |
2410 MachineOperatorBuilder::kFloat64RoundTiesAway |
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002411 MachineOperatorBuilder::kFloat32RoundTiesEven |
2412 MachineOperatorBuilder::kFloat64RoundTiesEven |
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002413 MachineOperatorBuilder::kWord32ShiftIsSafe |
2414 MachineOperatorBuilder::kInt32DivIsSafe |
Ben Murdoch097c5b22016-05-18 11:27:45 +01002415 MachineOperatorBuilder::kUint32DivIsSafe |
2416 MachineOperatorBuilder::kWord32ReverseBits |
2417 MachineOperatorBuilder::kWord64ReverseBits;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002418}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002419
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002420} // namespace compiler
2421} // namespace internal
2422} // namespace v8