blob: 637acac58bb43f45ee547d33870c9028980bcbcc [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/instruction-selector-impl.h"
6#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/compiler/node-properties.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008
9namespace v8 {
10namespace internal {
11namespace compiler {
12
// Classes of immediate operand encodings accepted by Arm64 instructions.
// Used by Arm64OperandGenerator::CanBeImmediate to decide whether a constant
// node can be encoded directly into an instruction or must go in a register.
enum ImmediateMode {
  kArithmeticImm,  // 12 bit unsigned immediate shifted left 0 or 12 bits
  kShift32Imm,     // 0 - 31
  kShift64Imm,     // 0 - 63
  kLogical32Imm,   // checked via Assembler::IsImmLogical (32-bit bitmask imm)
  kLogical64Imm,   // checked via Assembler::IsImmLogical (64-bit bitmask imm)
  kLoadStoreImm8,  // signed 8 bit or 12 bit unsigned scaled by access size
  kLoadStoreImm16,
  kLoadStoreImm32,
  kLoadStoreImm64,
  kNoImmediate     // never encodable as an immediate
};
25
26
27// Adds Arm64-specific methods for generating operands.
// Adds Arm64-specific methods for generating operands.
class Arm64OperandGenerator final : public OperandGenerator {
 public:
  explicit Arm64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Use an immediate operand if |node| is a constant encodable under |mode|,
  // otherwise assign a register.
  InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
    if (CanBeImmediate(node, mode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  // Use the zero register if the node has the immediate value zero, otherwise
  // assign a register.
  InstructionOperand UseRegisterOrImmediateZero(Node* node) {
    // Floats are compared bitwise against the int64 zero pattern so that
    // -0.0 (non-zero bits) is NOT treated as zero.
    if ((IsIntegerConstant(node) && (GetIntegerConstantValue(node) == 0)) ||
        (IsFloatConstant(node) &&
         (bit_cast<int64_t>(GetFloatConstantValue(node)) == V8_INT64_C(0)))) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  // Use the provided node if it has the required value, or create a
  // TempImmediate otherwise.
  InstructionOperand UseImmediateOrTemp(Node* node, int32_t value) {
    if (GetIntegerConstantValue(node) == value) {
      return UseImmediate(node);
    }
    return TempImmediate(value);
  }

  // True iff |node| is a 32- or 64-bit integer constant.
  bool IsIntegerConstant(Node* node) {
    return (node->opcode() == IrOpcode::kInt32Constant) ||
           (node->opcode() == IrOpcode::kInt64Constant);
  }

  // Value of an integer constant node, widened to 64 bits.
  // Precondition: IsIntegerConstant(node).
  int64_t GetIntegerConstantValue(Node* node) {
    if (node->opcode() == IrOpcode::kInt32Constant) {
      return OpParameter<int32_t>(node);
    }
    DCHECK(node->opcode() == IrOpcode::kInt64Constant);
    return OpParameter<int64_t>(node);
  }

  // True iff |node| is a 32- or 64-bit floating point constant.
  bool IsFloatConstant(Node* node) {
    return (node->opcode() == IrOpcode::kFloat32Constant) ||
           (node->opcode() == IrOpcode::kFloat64Constant);
  }

  // Value of a float constant node, widened to double.
  // Precondition: IsFloatConstant(node).
  double GetFloatConstantValue(Node* node) {
    if (node->opcode() == IrOpcode::kFloat32Constant) {
      return OpParameter<float>(node);
    }
    DCHECK_EQ(IrOpcode::kFloat64Constant, node->opcode());
    return OpParameter<double>(node);
  }

  // True iff |node| is an integer constant whose value is encodable as an
  // immediate under |mode|.
  bool CanBeImmediate(Node* node, ImmediateMode mode) {
    return IsIntegerConstant(node) &&
           CanBeImmediate(GetIntegerConstantValue(node), mode);
  }

  // True iff |value| is encodable as an immediate under |mode|; delegates the
  // per-encoding checks to the assembler.
  bool CanBeImmediate(int64_t value, ImmediateMode mode) {
    unsigned ignored;
    switch (mode) {
      case kLogical32Imm:
        // TODO(dcarney): some unencodable values can be handled by
        // switching instructions.
        return Assembler::IsImmLogical(static_cast<uint64_t>(value), 32,
                                       &ignored, &ignored, &ignored);
      case kLogical64Imm:
        return Assembler::IsImmLogical(static_cast<uint64_t>(value), 64,
                                       &ignored, &ignored, &ignored);
      case kArithmeticImm:
        return Assembler::IsImmAddSub(value);
      case kLoadStoreImm8:
        return IsLoadStoreImmediate(value, LSByte);
      case kLoadStoreImm16:
        return IsLoadStoreImmediate(value, LSHalfword);
      case kLoadStoreImm32:
        return IsLoadStoreImmediate(value, LSWord);
      case kLoadStoreImm64:
        return IsLoadStoreImmediate(value, LSDoubleWord);
      case kNoImmediate:
        return false;
      case kShift32Imm:  // Fall through.
      case kShift64Imm:
        // Shift operations only observe the bottom 5 or 6 bits of the value.
        // All possible shifts can be encoded by discarding bits which have no
        // effect.
        return true;
    }
    return false;
  }

  // True iff |node| is a constant equal to log2 of the access size of |rep|,
  // i.e. usable as the shift amount of a scaled load/store addressing mode.
  bool CanBeLoadStoreShiftImmediate(Node* node, MachineRepresentation rep) {
    // TODO(arm64): Load and Store on 128 bit Q registers is not supported yet.
    DCHECK_NE(MachineRepresentation::kSimd128, rep);
    return IsIntegerConstant(node) &&
           (GetIntegerConstantValue(node) == ElementSizeLog2Of(rep));
  }

 private:
  // A load/store offset is encodable either scaled (by access size) or
  // unscaled.
  bool IsLoadStoreImmediate(int64_t value, LSDataSize size) {
    return Assembler::IsImmLSScaled(value, size) ||
           Assembler::IsImmLSUnscaled(value);
  }
};
137
138
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000139namespace {
140
141void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400142 Arm64OperandGenerator g(selector);
143 selector->Emit(opcode, g.DefineAsRegister(node),
144 g.UseRegister(node->InputAt(0)));
145}
146
147
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000148void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000149 Arm64OperandGenerator g(selector);
150 selector->Emit(opcode, g.DefineAsRegister(node),
151 g.UseRegister(node->InputAt(0)),
152 g.UseRegister(node->InputAt(1)));
153}
154
155
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000156void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
157 ImmediateMode operand_mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000158 Arm64OperandGenerator g(selector);
159 selector->Emit(opcode, g.DefineAsRegister(node),
160 g.UseRegister(node->InputAt(0)),
161 g.UseOperand(node->InputAt(1), operand_mode));
162}
163
164
// Try to fold a constant-amount shift |input_node| into |node|'s instruction
// as an Operand2 shifted-register addressing mode. On success, ORs the
// addressing mode into *opcode and returns true. ROR is only matched when
// |try_ror| is set (callers disable it for add/sub, which have no ROR form).
bool TryMatchAnyShift(InstructionSelector* selector, Node* node,
                      Node* input_node, InstructionCode* opcode, bool try_ror) {
  Arm64OperandGenerator g(selector);

  // The shift must be used only by |node| (so it can be subsumed), and its
  // amount must be an integer constant for the immediate-shift forms.
  if (!selector->CanCover(node, input_node)) return false;
  if (input_node->InputCount() != 2) return false;
  if (!g.IsIntegerConstant(input_node->InputAt(1))) return false;

  switch (input_node->opcode()) {
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord64Shl:
      *opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
      return true;
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord64Shr:
      *opcode |= AddressingModeField::encode(kMode_Operand2_R_LSR_I);
      return true;
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord64Sar:
      *opcode |= AddressingModeField::encode(kMode_Operand2_R_ASR_I);
      return true;
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord64Ror:
      if (try_ror) {
        *opcode |= AddressingModeField::encode(kMode_Operand2_R_ROR_I);
        return true;
      }
      return false;
    default:
      return false;
  }
}
197
198
// Try to fold an explicit extension pattern on |right_node| into |node|'s
// instruction as an extended-register operand (UXTB/UXTH/SXTB/SXTH). On
// success, fills *left_op / *right_op, ORs the addressing mode into *opcode
// and returns true.
bool TryMatchAnyExtend(Arm64OperandGenerator* g, InstructionSelector* selector,
                       Node* node, Node* left_node, Node* right_node,
                       InstructionOperand* left_op,
                       InstructionOperand* right_op, InstructionCode* opcode) {
  // The extension pattern must be used only by |node| so it can be subsumed.
  if (!selector->CanCover(node, right_node)) return false;

  NodeMatcher nm(right_node);

  if (nm.IsWord32And()) {
    // (x & 0xff) / (x & 0xffff) is a zero-extension of the low byte/halfword.
    Int32BinopMatcher mright(right_node);
    if (mright.right().Is(0xff) || mright.right().Is(0xffff)) {
      int32_t mask = mright.right().Value();
      *left_op = g->UseRegister(left_node);
      *right_op = g->UseRegister(mright.left().node());
      *opcode |= AddressingModeField::encode(
          (mask == 0xff) ? kMode_Operand2_R_UXTB : kMode_Operand2_R_UXTH);
      return true;
    }
  } else if (nm.IsWord32Sar()) {
    // ((x << 16) >> 16) / ((x << 24) >> 24) with an arithmetic right shift is
    // a sign-extension of the low halfword/byte.
    Int32BinopMatcher mright(right_node);
    if (selector->CanCover(mright.node(), mright.left().node()) &&
        mright.left().IsWord32Shl()) {
      Int32BinopMatcher mleft_of_right(mright.left().node());
      if ((mright.right().Is(16) && mleft_of_right.right().Is(16)) ||
          (mright.right().Is(24) && mleft_of_right.right().Is(24))) {
        int32_t shift = mright.right().Value();
        *left_op = g->UseRegister(left_node);
        *right_op = g->UseRegister(mleft_of_right.left().node());
        *opcode |= AddressingModeField::encode(
            (shift == 24) ? kMode_Operand2_R_SXTB : kMode_Operand2_R_SXTH);
        return true;
      }
    }
  }
  return false;
}
235
// Try to use a scaled-index addressing mode for a load/store whose index is
// (base_index << log2(access size)). On success fills *index_op with the
// unshifted index register and *shift_immediate_op with the shift amount,
// and returns true.
bool TryMatchLoadStoreShift(Arm64OperandGenerator* g,
                            InstructionSelector* selector,
                            MachineRepresentation rep, Node* node, Node* index,
                            InstructionOperand* index_op,
                            InstructionOperand* shift_immediate_op) {
  // The shift must be used only by this load/store so it can be subsumed.
  if (!selector->CanCover(node, index)) return false;
  if (index->InputCount() != 2) return false;
  Node* left = index->InputAt(0);
  Node* right = index->InputAt(1);
  switch (index->opcode()) {
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord64Shl:
      // Only a shift amount equal to log2 of the access size is valid for the
      // scaled addressing mode.
      if (!g->CanBeLoadStoreShiftImmediate(right, rep)) {
        return false;
      }
      *index_op = g->UseRegister(left);
      *shift_immediate_op = g->UseImmediate(right);
      return true;
    default:
      return false;
  }
}
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400258
// Bitfields describing binary operator properties, packed into the uint8_t
// returned by GetBinopProperties below:
// CanCommuteField is true if we can switch the two operands, potentially
// requiring commuting the flags continuation condition.
typedef BitField8<bool, 1, 1> CanCommuteField;
// MustCommuteCondField is true when we need to commute the flags continuation
// condition in order to switch the operands.
typedef BitField8<bool, 2, 1> MustCommuteCondField;
// IsComparisonField is true when the operation is a comparison and has no
// result other than the condition.
typedef BitField8<bool, 3, 1> IsComparisonField;
// IsAddSubField is true when an instruction is encoded as ADD or SUB.
typedef BitField8<bool, 4, 1> IsAddSubField;
271
// Get properties of a binary operator, packed into the bitfields declared
// above. Only the Arm64 binop opcodes handled by VisitBinop are valid here;
// anything else hits UNREACHABLE().
uint8_t GetBinopProperties(InstructionCode opcode) {
  uint8_t result = 0;
  switch (opcode) {
    case kArm64Cmp32:
    case kArm64Cmp:
      // We can commute CMP by switching the inputs and commuting
      // the flags continuation.
      result = CanCommuteField::update(result, true);
      result = MustCommuteCondField::update(result, true);
      result = IsComparisonField::update(result, true);
      // The CMP and CMN instructions are encoded as SUB or ADD
      // with zero output register, and therefore support the same
      // operand modes.
      result = IsAddSubField::update(result, true);
      break;
    case kArm64Cmn32:
    case kArm64Cmn:
      // CMN is commutative (a + b == b + a) without touching the condition.
      result = CanCommuteField::update(result, true);
      result = IsComparisonField::update(result, true);
      result = IsAddSubField::update(result, true);
      break;
    case kArm64Add32:
    case kArm64Add:
      result = CanCommuteField::update(result, true);
      result = IsAddSubField::update(result, true);
      break;
    case kArm64Sub32:
    case kArm64Sub:
      // Subtraction is not commutative.
      result = IsAddSubField::update(result, true);
      break;
    case kArm64Tst32:
    case kArm64Tst:
      result = CanCommuteField::update(result, true);
      result = IsComparisonField::update(result, true);
      break;
    case kArm64And32:
    case kArm64And:
    case kArm64Or32:
    case kArm64Or:
    case kArm64Eor32:
    case kArm64Eor:
      // Bitwise logical operations commute freely.
      result = CanCommuteField::update(result, true);
      break;
    default:
      UNREACHABLE();
      return 0;
  }
  // Commuting the condition only makes sense if the operands can be swapped.
  DCHECK_IMPLIES(MustCommuteCondField::decode(result),
                 CanCommuteField::decode(result));
  return result;
}
324
// Shared routine for multiple binary operations.
//
// Selects operands for |node| in decreasing order of preference:
//   1. right operand as an encodable immediate;
//   2. left operand as an immediate, after commuting (if allowed);
//   3. extended-register right/left operand (ADD/SUB-encoded ops only);
//   4. shifted-register right/left operand;
//   5. both operands in registers (left may use the zero register).
// The flags continuation |cont| contributes branch labels and/or a
// materialized result, and its condition is commuted whenever the operands
// are swapped on an operation that requires it (see MustCommuteCondField).
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode,
                FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  // Worst case: 3 operand inputs (shift match) + 2 branch labels.
  InstructionOperand inputs[5];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  Node* left_node = node->InputAt(0);
  Node* right_node = node->InputAt(1);

  uint8_t properties = GetBinopProperties(opcode);
  bool can_commute = CanCommuteField::decode(properties);
  bool must_commute_cond = MustCommuteCondField::decode(properties);
  bool is_add_sub = IsAddSubField::decode(properties);

  if (g.CanBeImmediate(right_node, operand_mode)) {
    inputs[input_count++] = g.UseRegister(left_node);
    inputs[input_count++] = g.UseImmediate(right_node);
  } else if (can_commute && g.CanBeImmediate(left_node, operand_mode)) {
    if (must_commute_cond) cont->Commute();
    inputs[input_count++] = g.UseRegister(right_node);
    inputs[input_count++] = g.UseImmediate(left_node);
  } else if (is_add_sub &&
             TryMatchAnyExtend(&g, selector, node, left_node, right_node,
                               &inputs[0], &inputs[1], &opcode)) {
    input_count += 2;
  } else if (is_add_sub && can_commute &&
             TryMatchAnyExtend(&g, selector, node, right_node, left_node,
                               &inputs[0], &inputs[1], &opcode)) {
    if (must_commute_cond) cont->Commute();
    input_count += 2;
  } else if (TryMatchAnyShift(selector, node, right_node, &opcode,
                              !is_add_sub)) {
    // ROR is only tried for non-add/sub opcodes (no ROR form for ADD/SUB).
    Matcher m_shift(right_node);
    inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
    inputs[input_count++] = g.UseRegister(m_shift.left().node());
    inputs[input_count++] = g.UseImmediate(m_shift.right().node());
  } else if (can_commute && TryMatchAnyShift(selector, node, left_node, &opcode,
                                             !is_add_sub)) {
    if (must_commute_cond) cont->Commute();
    Matcher m_shift(left_node);
    inputs[input_count++] = g.UseRegisterOrImmediateZero(right_node);
    inputs[input_count++] = g.UseRegister(m_shift.left().node());
    inputs[input_count++] = g.UseImmediate(m_shift.right().node());
  } else {
    inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
    inputs[input_count++] = g.UseRegister(right_node);
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  // Pure comparisons produce no value output, only flags.
  if (!IsComparisonField::decode(properties)) {
    outputs[output_count++] = g.DefineAsRegister(node);
  }

  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK((output_count != 0) || IsComparisonField::decode(properties));
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}
404
405
// Shared routine for multiple binary operations.
// Convenience overload that uses a default (no-op) flags continuation.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
                ImmediateMode operand_mode) {
  FlagsContinuation cont;
  VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
}
413
414
// Shared routine for add/sub selection. If the right operand is a negative
// constant whose negation is an encodable arithmetic immediate, emit the
// opposite operation instead (e.g. x + (-imm) => SUB x, imm); otherwise fall
// back to the generic binop path.
template <typename Matcher>
void VisitAddSub(InstructionSelector* selector, Node* node, ArchOpcode opcode,
                 ArchOpcode negate_opcode) {
  Arm64OperandGenerator g(selector);
  Matcher m(node);
  if (m.right().HasValue() && (m.right().Value() < 0) &&
      g.CanBeImmediate(-m.right().Value(), kArithmeticImm)) {
    selector->Emit(negate_opcode, g.DefineAsRegister(node),
                   g.UseRegister(m.left().node()),
                   g.TempImmediate(static_cast<int32_t>(-m.right().Value())));
  } else {
    VisitBinop<Matcher>(selector, node, opcode, kArithmeticImm);
  }
}
429
430
// For multiplications by immediate of the form x * (2^k + 1), where k > 0,
// return the value of k, otherwise return zero. This is used to reduce the
// multiplication to addition with left shift: x + (x << k).
template <typename Matcher>
int32_t LeftShiftForReducedMultiply(Matcher* m) {
  DCHECK(m->IsInt32Mul() || m->IsInt64Mul());
  // Smallest candidate is 3 (= 2^1 + 1); 2 would give k == 0, which the
  // contract excludes.
  if (m->right().HasValue() && m->right().Value() >= 3) {
    uint64_t value_minus_one = m->right().Value() - 1;
    if (base::bits::IsPowerOfTwo64(value_minus_one)) {
      return WhichPowerOf2_64(value_minus_one);
    }
  }
  return 0;
}
445
446} // namespace
447
448
// Select an Arm64 load for |node|: pick the load opcode and immediate-offset
// class from the machine representation, then choose between immediate-offset
// (MRI), scaled-register (LSL) and register-register (MRR) addressing.
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  MachineRepresentation rep = load_rep.representation();
  Arm64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  InstructionCode opcode = kArchNop;
  ImmediateMode immediate_mode = kNoImmediate;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  InstructionOperand outputs[1];
  switch (rep) {
    case MachineRepresentation::kFloat32:
      opcode = kArm64LdrS;
      immediate_mode = kLoadStoreImm32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kArm64LdrD;
      immediate_mode = kLoadStoreImm64;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      // Sign- or zero-extending byte load depending on the load signedness.
      opcode = load_rep.IsSigned() ? kArm64Ldrsb : kArm64Ldrb;
      immediate_mode = kLoadStoreImm8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kArm64Ldrsh : kArm64Ldrh;
      immediate_mode = kLoadStoreImm16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kArm64LdrW;
      immediate_mode = kLoadStoreImm32;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kArm64Ldr;
      immediate_mode = kLoadStoreImm64;
      break;
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  outputs[0] = g.DefineAsRegister(node);
  inputs[0] = g.UseRegister(base);

  if (g.CanBeImmediate(index, immediate_mode)) {
    // [base + imm]
    input_count = 2;
    inputs[1] = g.UseImmediate(index);
    opcode |= AddressingModeField::encode(kMode_MRI);
  } else if (TryMatchLoadStoreShift(&g, this, rep, node, index, &inputs[1],
                                    &inputs[2])) {
    // [base + reg << log2(access size)]
    input_count = 3;
    opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
  } else {
    // [base + reg]
    input_count = 2;
    inputs[1] = g.UseRegister(index);
    opcode |= AddressingModeField::encode(kMode_MRR);
  }

  Emit(opcode, arraysize(outputs), outputs, input_count, inputs);
}
512
513
// Select an Arm64 store for |node|. Stores needing a write barrier are
// emitted as kArchStoreWithWriteBarrier with unique registers (the barrier
// stub clobbers temps); plain stores pick an opcode by representation and an
// addressing mode like VisitLoad.
void InstructionSelector::VisitStore(Node* node) {
  Arm64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  // TODO(arm64): I guess this could be done in a better way.
  if (write_barrier_kind != kNoWriteBarrier) {
    // Write barriers only apply to tagged values.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    // OutOfLineRecordWrite uses the index in an arithmetic instruction, so we
    // must check kArithmeticImm as well as kLoadStoreImm64.
    if (g.CanBeImmediate(index, kArithmeticImm) &&
        g.CanBeImmediate(index, kLoadStoreImm64)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    // Map the write barrier kind to the record-write mode checked by the
    // store stub.
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    InstructionOperand inputs[4];
    size_t input_count = 0;
    InstructionCode opcode = kArchNop;
    ImmediateMode immediate_mode = kNoImmediate;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kArm64StrS;
        immediate_mode = kLoadStoreImm32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kArm64StrD;
        immediate_mode = kLoadStoreImm64;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kArm64Strb;
        immediate_mode = kLoadStoreImm8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kArm64Strh;
        immediate_mode = kLoadStoreImm16;
        break;
      case MachineRepresentation::kWord32:
        opcode = kArm64StrW;
        immediate_mode = kLoadStoreImm32;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kArm64Str;
        immediate_mode = kLoadStoreImm64;
        break;
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }

    // A zero value can use the zero register directly.
    inputs[0] = g.UseRegisterOrImmediateZero(value);
    inputs[1] = g.UseRegister(base);

    if (g.CanBeImmediate(index, immediate_mode)) {
      // [base + imm]
      input_count = 3;
      inputs[2] = g.UseImmediate(index);
      opcode |= AddressingModeField::encode(kMode_MRI);
    } else if (TryMatchLoadStoreShift(&g, this, rep, node, index, &inputs[2],
                                      &inputs[3])) {
      // [base + reg << log2(access size)]
      input_count = 4;
      opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
    } else {
      // [base + reg]
      input_count = 3;
      inputs[2] = g.UseRegister(index);
      opcode |= AddressingModeField::encode(kMode_MRR);
    }

    Emit(opcode, 0, nullptr, input_count, inputs);
  }
}
621
622
// Select a bounds-checked load: opcode chosen by representation; the length
// is passed as an immediate when it is a constant power of two so the code
// generator can emit a cheaper bounds check.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  Arm64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // If the length is a constant power of two, allow the code generator to
  // pick a more efficient bounds check sequence by passing the length as an
  // immediate.
  if (length->opcode() == IrOpcode::kInt32Constant) {
    Int32Matcher m(length);
    if (m.IsPowerOf2()) {
      Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
           g.UseRegister(offset), g.UseImmediate(length));
      return;
    }
  }
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.UseOperand(length, kArithmeticImm));
}
670
671
// Select a bounds-checked store: mirrors VisitCheckedLoad's opcode choice and
// power-of-two length optimization; a zero value may use the zero register.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  Arm64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // If the length is a constant power of two, allow the code generator to
  // pick a more efficient bounds check sequence by passing the length as an
  // immediate.
  if (length->opcode() == IrOpcode::kInt32Constant) {
    Int32Matcher m(length);
    if (m.IsPowerOf2()) {
      Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
           g.UseImmediate(length), g.UseRegisterOrImmediateZero(value));
      return;
    }
  }
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.UseOperand(length, kArithmeticImm),
       g.UseRegisterOrImmediateZero(value));
}
721
722
// Shared selector for the bitwise and/or/xor operations (32- and 64-bit).
// Falls back to the generic binop emission, but first tries to fuse an
// inverted operand: when one input is Xor(x, -1) and the caller reports that
// input as coverable (no other uses observable here), the inverted-operand
// instruction (bic/orn/eon) is selected instead; a bare Xor(x, -1) becomes a
// register not.
template <typename Matcher>
static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
                         ArchOpcode opcode, bool left_can_cover,
                         bool right_can_cover, ImmediateMode imm_mode) {
  Arm64OperandGenerator g(selector);

  // Map instruction to equivalent operation with inverted right input.
  ArchOpcode inv_opcode = opcode;
  switch (opcode) {
    case kArm64And32:
      inv_opcode = kArm64Bic32;
      break;
    case kArm64And:
      inv_opcode = kArm64Bic;
      break;
    case kArm64Or32:
      inv_opcode = kArm64Orn32;
      break;
    case kArm64Or:
      inv_opcode = kArm64Orn;
      break;
    case kArm64Eor32:
      inv_opcode = kArm64Eon32;
      break;
    case kArm64Eor:
      inv_opcode = kArm64Eon;
      break;
    default:
      UNREACHABLE();
  }

  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
    Matcher mleft(m->left().node());
    if (mleft.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->right().node()),
                     g.UseRegister(mleft.left().node()));
      return;
    }
  }

  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
      right_can_cover) {
    Matcher mright(m->right().node());
    if (mright.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->left().node()),
                     g.UseRegister(mright.left().node()));
      return;
    }
  }

  // Xor with all-ones is a plain bitwise not; otherwise emit the generic
  // two-operand form (which may still use an encodable logical immediate).
  if (m->IsWord32Xor() && m->right().Is(-1)) {
    selector->Emit(kArm64Not32, g.DefineAsRegister(node),
                   g.UseRegister(m->left().node()));
  } else if (m->IsWord64Xor() && m->right().Is(-1)) {
    selector->Emit(kArm64Not, g.DefineAsRegister(node),
                   g.UseRegister(m->left().node()));
  } else {
    VisitBinop<Matcher>(selector, node, opcode, imm_mode);
  }
}
789
790
// Selects code for a 32-bit bitwise-and. Tries to fold And(Shr(x, imm), mask)
// into a single ubfx (unsigned bitfield extract) when the mask is a
// contiguous run of low bits; otherwise defers to VisitLogical.
void InstructionSelector::VisitWord32And(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32Shr() && CanCover(node, m.left().node()) &&
      m.right().HasValue()) {
    uint32_t mask = m.right().Value();
    uint32_t mask_width = base::bits::CountPopulation32(mask);
    uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
    if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
      // The mask must be contiguous, and occupy the least-significant bits.
      DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));

      // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
      // significant bits.
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().HasValue()) {
        // Any shift value can match; int32 shifts use `value % 32`.
        uint32_t lsb = mleft.right().Value() & 0x1f;

        // Ubfx cannot extract bits past the register size, however since
        // shifting the original value would have introduced some zeros we can
        // still use ubfx with a smaller mask and the remaining bits will be
        // zeros.
        if (lsb + mask_width > 32) mask_width = 32 - lsb;

        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(mleft.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
      // Other cases fall through to the normal And operation.
    }
  }
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kArm64And32, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kLogical32Imm);
}
829
830
// 64-bit counterpart of VisitWord32And: folds And(Shr(x, imm), mask) into a
// single ubfx when the mask is a contiguous run of low bits; otherwise defers
// to VisitLogical.
void InstructionSelector::VisitWord64And(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) &&
      m.right().HasValue()) {
    uint64_t mask = m.right().Value();
    uint64_t mask_width = base::bits::CountPopulation64(mask);
    uint64_t mask_msb = base::bits::CountLeadingZeros64(mask);
    if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
      // The mask must be contiguous, and occupy the least-significant bits.
      DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));

      // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
      // significant bits.
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().HasValue()) {
        // Any shift value can match; int64 shifts use `value % 64`.
        uint32_t lsb = static_cast<uint32_t>(mleft.right().Value() & 0x3f);

        // Ubfx cannot extract bits past the register size, however since
        // shifting the original value would have introduced some zeros we can
        // still use ubfx with a smaller mask and the remaining bits will be
        // zeros.
        if (lsb + mask_width > 64) mask_width = 64 - lsb;

        Emit(kArm64Ubfx, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(mleft.right().node(), lsb),
             g.TempImmediate(static_cast<int32_t>(mask_width)));
        return;
      }
      // Other cases fall through to the normal And operation.
    }
  }
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kArm64And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kLogical64Imm);
}
869
870
871void InstructionSelector::VisitWord32Or(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400872 Int32BinopMatcher m(node);
873 VisitLogical<Int32BinopMatcher>(
874 this, node, &m, kArm64Or32, CanCover(node, m.left().node()),
875 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000876}
877
878
879void InstructionSelector::VisitWord64Or(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400880 Int64BinopMatcher m(node);
881 VisitLogical<Int64BinopMatcher>(
882 this, node, &m, kArm64Or, CanCover(node, m.left().node()),
883 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000884}
885
886
887void InstructionSelector::VisitWord32Xor(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000888 Int32BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400889 VisitLogical<Int32BinopMatcher>(
890 this, node, &m, kArm64Eor32, CanCover(node, m.left().node()),
891 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000892}
893
894
895void InstructionSelector::VisitWord64Xor(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000896 Int64BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400897 VisitLogical<Int64BinopMatcher>(
898 this, node, &m, kArm64Eor, CanCover(node, m.left().node()),
899 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000900}
901
902
// Selects code for a 32-bit left shift. Tries to fold Shl(And(x, mask), imm)
// with a contiguous low-bit mask into either a bare lsl (when the shifted
// mask covers the top bit) or a ubfiz (unsigned bitfield insert in zeros);
// otherwise emits a plain shift.
void InstructionSelector::VisitWord32Shl(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && CanCover(node, m.left().node()) &&
      m.right().IsInRange(1, 31)) {
    Arm64OperandGenerator g(this);
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      uint32_t mask = mleft.right().Value();
      uint32_t mask_width = base::bits::CountPopulation32(mask);
      uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
        uint32_t shift = m.right().Value();
        // Mask is contiguous from bit 0, and the shift is non-zero.
        DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));
        DCHECK_NE(0u, shift);

        if ((shift + mask_width) >= 32) {
          // If the mask is contiguous and reaches or extends beyond the top
          // bit, only the shift is needed.
          Emit(kArm64Lsl32, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()));
          return;
        } else {
          // Select Ubfiz for Shl(And(x, mask), imm) where the mask is
          // contiguous, and the shift immediate non-zero.
          Emit(kArm64Ubfiz32, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()), g.TempImmediate(mask_width));
          return;
        }
      }
    }
  }
  VisitRRO(this, kArm64Lsl32, node, kShift32Imm);
}
938
939
// Selects code for a 64-bit left shift. When the input is a 32-to-64-bit
// extension whose extended bits are entirely shifted out, the extension is
// skipped and the shift reads the 32-bit value directly.
void InstructionSelector::VisitWord64Shl(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
      m.right().IsInRange(32, 63)) {
    // There's no need to sign/zero-extend to 64-bit if we shift out the upper
    // 32 bits anyway.
    Emit(kArm64Lsl, g.DefineAsRegister(node),
         g.UseRegister(m.left().node()->InputAt(0)),
         g.UseImmediate(m.right().node()));
    return;
  }
  VisitRRO(this, kArm64Lsl, node, kShift64Imm);
}
954
955
namespace {

// Tries to select a single bitfield extract (ubfx/sbfx) for a
// shift-left-then-shift-right pattern that isolates the low bits of x.
// Returns true and emits the instruction on success, false to let the caller
// fall back to generic selection.
bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) {
  Arm64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    // Select Ubfx or Sbfx for (x << (K & 0x1f)) OP (K & 0x1f), where
    // OP is >>> or >> and (K & 0x1f) != 0.
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue() && m.right().HasValue() &&
        (mleft.right().Value() & 0x1f) == (m.right().Value() & 0x1f)) {
      DCHECK(m.IsWord32Shr() || m.IsWord32Sar());
      // Arithmetic shift right extracts a signed field, logical an unsigned.
      ArchOpcode opcode = m.IsWord32Sar() ? kArm64Sbfx32 : kArm64Ubfx32;

      int right_val = m.right().Value() & 0x1f;
      DCHECK_NE(right_val, 0);

      selector->Emit(opcode, g.DefineAsRegister(node),
                     g.UseRegister(mleft.left().node()), g.TempImmediate(0),
                     g.TempImmediate(32 - right_val));
      return true;
    }
  }
  return false;
}

}  // namespace
983
984
// Selects code for a 32-bit logical shift right. Tries, in order:
// ubfx for Shr(And(x, mask), imm); a generic bitfield extract; fusing with a
// preceding Uint32MulHigh into umull+lsr; then a plain shift.
void InstructionSelector::VisitWord32Shr(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x1f;
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      uint32_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation32(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_msb + mask_width + lsb) == 32) {
        Arm64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask));
        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(m.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  } else if (TryEmitBitfieldExtract32(this, node)) {
    return;
  }

  if (m.left().IsUint32MulHigh() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    // Combine this shift with the multiply and shift that would be generated
    // by Uint32MulHigh.
    Arm64OperandGenerator g(this);
    Node* left = m.left().node();
    int shift = m.right().Value() & 0x1f;
    InstructionOperand const smull_operand = g.TempRegister();
    Emit(kArm64Umull, smull_operand, g.UseRegister(left->InputAt(0)),
         g.UseRegister(left->InputAt(1)));
    Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand,
         g.TempImmediate(32 + shift));
    return;
  }

  VisitRRO(this, kArm64Lsr32, node, kShift32Imm);
}
1027
1028
// Selects code for a 64-bit logical shift right: ubfx for
// Shr(And(x, mask), imm) with a suitably-aligned contiguous mask, otherwise a
// plain shift.
void InstructionSelector::VisitWord64Shr(Node* node) {
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x3f;
    Int64BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      uint64_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation64(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros64(mask);
      if ((mask_msb + mask_width + lsb) == 64) {
        Arm64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros64(mask));
        Emit(kArm64Ubfx, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(m.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  }
  VisitRRO(this, kArm64Lsr, node, kShift64Imm);
}
1053
1054
// Selects code for a 32-bit arithmetic shift right. Tries, in order: a signed
// bitfield extract; fusing with a preceding Int32MulHigh into smull+asr;
// fusing Sar(Add(a, Int32MulHigh(x, y)), imm) into smull + add-with-shift +
// asr; then a plain shift.
void InstructionSelector::VisitWord32Sar(Node* node) {
  if (TryEmitBitfieldExtract32(this, node)) {
    return;
  }

  Int32BinopMatcher m(node);
  if (m.left().IsInt32MulHigh() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    // Combine this shift with the multiply and shift that would be generated
    // by Int32MulHigh.
    Arm64OperandGenerator g(this);
    Node* left = m.left().node();
    int shift = m.right().Value() & 0x1f;
    InstructionOperand const smull_operand = g.TempRegister();
    Emit(kArm64Smull, smull_operand, g.UseRegister(left->InputAt(0)),
         g.UseRegister(left->InputAt(1)));
    Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand,
         g.TempImmediate(32 + shift));
    return;
  }

  if (m.left().IsInt32Add() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    Node* add_node = m.left().node();
    Int32BinopMatcher madd_node(add_node);
    if (madd_node.left().IsInt32MulHigh() &&
        CanCover(add_node, madd_node.left().node())) {
      // Combine the shift that would be generated by Int32MulHigh with the add
      // on the left of this Sar operation. We do it here, as the result of the
      // add potentially has 33 bits, so we have to ensure the result is
      // truncated by being the input to this 32-bit Sar operation.
      Arm64OperandGenerator g(this);
      Node* mul_node = madd_node.left().node();

      InstructionOperand const smull_operand = g.TempRegister();
      Emit(kArm64Smull, smull_operand, g.UseRegister(mul_node->InputAt(0)),
           g.UseRegister(mul_node->InputAt(1)));

      InstructionOperand const add_operand = g.TempRegister();
      Emit(kArm64Add | AddressingModeField::encode(kMode_Operand2_R_ASR_I),
           add_operand, g.UseRegister(add_node->InputAt(1)), smull_operand,
           g.TempImmediate(32));

      Emit(kArm64Asr32, g.DefineAsRegister(node), add_operand,
           g.UseImmediate(node->InputAt(1)));
      return;
    }
  }

  VisitRRO(this, kArm64Asr32, node, kShift32Imm);
}
1106
1107
// 64-bit arithmetic shift right: no special patterns, plain shift.
void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitRRO(this, kArm64Asr, node, kShift64Imm);
}


// 32-bit rotate right.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kArm64Ror32, node, kShift32Imm);
}


// 64-bit rotate right.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kArm64Ror, node, kShift64Imm);
}
1121
1122
// Count-leading-zeros, 64-bit.
void InstructionSelector::VisitWord64Clz(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Clz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


// Count-leading-zeros, 32-bit.
void InstructionSelector::VisitWord32Clz(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Clz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


// Count-trailing-zeros is not selected for this target; these visitors must
// never be reached.
void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }


// Bit reversal, 32-bit (rbit).
void InstructionSelector::VisitWord32ReverseBits(Node* node) {
  VisitRR(this, kArm64Rbit32, node);
}


// Bit reversal, 64-bit (rbit).
void InstructionSelector::VisitWord64ReverseBits(Node* node) {
  VisitRR(this, kArm64Rbit, node);
}


// Population count is not selected for this target; these visitors must never
// be reached.
void InstructionSelector::VisitWord32Popcnt(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64Popcnt(Node* node) { UNREACHABLE(); }
1155
1156
// Selects code for a 32-bit add, fusing a coverable multiply on either side
// into a single madd, unless that multiply would later be strength-reduced to
// an add-with-shift (in which case fusing would pessimize).
void InstructionSelector::VisitInt32Add(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  // Select Madd(x, y, z) for Add(Mul(x, y), z).
  if (m.left().IsInt32Mul() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mleft) == 0) {
      Emit(kArm64Madd32, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }
  // Select Madd(x, y, z) for Add(z, Mul(x, y)).
  if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Madd32, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }
  VisitAddSub<Int32BinopMatcher>(this, node, kArm64Add32, kArm64Sub32);
}
1186
1187
// 64-bit counterpart of VisitInt32Add: fuses a coverable multiply on either
// side into madd unless the multiply would later be reduced to
// add-with-shift.
void InstructionSelector::VisitInt64Add(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  // Select Madd(x, y, z) for Add(Mul(x, y), z).
  if (m.left().IsInt64Mul() && CanCover(node, m.left().node())) {
    Int64BinopMatcher mleft(m.left().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mleft) == 0) {
      Emit(kArm64Madd, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }
  // Select Madd(x, y, z) for Add(z, Mul(x, y)).
  if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Madd, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }
  VisitAddSub<Int64BinopMatcher>(this, node, kArm64Add, kArm64Sub);
}
1217
1218
1219void InstructionSelector::VisitInt32Sub(Node* node) {
1220 Arm64OperandGenerator g(this);
1221 Int32BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001222
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001223 // Select Msub(x, y, a) for Sub(a, Mul(x, y)).
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001224 if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
1225 Int32BinopMatcher mright(m.right().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001226 // Check multiply can't be later reduced to addition with shift.
1227 if (LeftShiftForReducedMultiply(&mright) == 0) {
1228 Emit(kArm64Msub32, g.DefineAsRegister(node),
1229 g.UseRegister(mright.left().node()),
1230 g.UseRegister(mright.right().node()),
1231 g.UseRegister(m.left().node()));
1232 return;
1233 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001234 }
1235
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001236 VisitAddSub<Int32BinopMatcher>(this, node, kArm64Sub32, kArm64Add32);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001237}
1238
1239
1240void InstructionSelector::VisitInt64Sub(Node* node) {
1241 Arm64OperandGenerator g(this);
1242 Int64BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001243
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001244 // Select Msub(x, y, a) for Sub(a, Mul(x, y)).
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001245 if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
1246 Int64BinopMatcher mright(m.right().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001247 // Check multiply can't be later reduced to addition with shift.
1248 if (LeftShiftForReducedMultiply(&mright) == 0) {
1249 Emit(kArm64Msub, g.DefineAsRegister(node),
1250 g.UseRegister(mright.left().node()),
1251 g.UseRegister(mright.right().node()),
1252 g.UseRegister(m.left().node()));
1253 return;
1254 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001255 }
1256
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001257 VisitAddSub<Int64BinopMatcher>(this, node, kArm64Sub, kArm64Add);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001258}
1259
1260
// Selects code for a 32-bit multiply. Tries strength reduction to
// add-with-shift for x * (2^k + 1), then mneg for a negated operand, then a
// plain mul.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);

  // First, try to reduce the multiplication to addition with left shift.
  // x * (2^k + 1) -> x + (x << k)
  int32_t shift = LeftShiftForReducedMultiply(&m);
  if (shift > 0) {
    Emit(kArm64Add32 | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.UseRegister(m.left().node()), g.TempImmediate(shift));
    return;
  }

  if (m.left().IsInt32Sub() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());

    // Select Mneg(x, y) for Mul(Sub(0, x), y).
    if (mleft.left().Is(0)) {
      Emit(kArm64Mneg32, g.DefineAsRegister(node),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }

  if (m.right().IsInt32Sub() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());

    // Select Mneg(x, y) for Mul(x, Sub(0, y)).
    if (mright.left().Is(0)) {
      Emit(kArm64Mneg32, g.DefineAsRegister(node),
           g.UseRegister(m.left().node()),
           g.UseRegister(mright.right().node()));
      return;
    }
  }

  VisitRRR(this, kArm64Mul32, node);
}
1301
1302
// 64-bit counterpart of VisitInt32Mul: add-with-shift reduction for
// x * (2^k + 1), mneg for a negated operand, else a plain mul.
void InstructionSelector::VisitInt64Mul(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);

  // First, try to reduce the multiplication to addition with left shift.
  // x * (2^k + 1) -> x + (x << k)
  int32_t shift = LeftShiftForReducedMultiply(&m);
  if (shift > 0) {
    Emit(kArm64Add | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.UseRegister(m.left().node()), g.TempImmediate(shift));
    return;
  }

  if (m.left().IsInt64Sub() && CanCover(node, m.left().node())) {
    Int64BinopMatcher mleft(m.left().node());

    // Select Mneg(x, y) for Mul(Sub(0, x), y).
    if (mleft.left().Is(0)) {
      Emit(kArm64Mneg, g.DefineAsRegister(node),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }

  if (m.right().IsInt64Sub() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());

    // Select Mneg(x, y) for Mul(x, Sub(0, y)).
    if (mright.left().Is(0)) {
      Emit(kArm64Mneg, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(mright.right().node()));
      return;
    }
  }

  VisitRRR(this, kArm64Mul, node);
}
1342
1343
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001344void InstructionSelector::VisitInt32MulHigh(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001345 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001346 InstructionOperand const smull_operand = g.TempRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001347 Emit(kArm64Smull, smull_operand, g.UseRegister(node->InputAt(0)),
1348 g.UseRegister(node->InputAt(1)));
1349 Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1350}
1351
1352
1353void InstructionSelector::VisitUint32MulHigh(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001354 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001355 InstructionOperand const smull_operand = g.TempRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001356 Emit(kArm64Umull, smull_operand, g.UseRegister(node->InputAt(0)),
1357 g.UseRegister(node->InputAt(1)));
1358 Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1359}
1360
1361
// Integer division and modulus visitors: each maps directly onto a single
// three-register pseudo-instruction for the code generator.
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitRRR(this, kArm64Idiv32, node);
}


void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kArm64Idiv, node);
}


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitRRR(this, kArm64Udiv32, node);
}


void InstructionSelector::VisitUint64Div(Node* node) {
  VisitRRR(this, kArm64Udiv, node);
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kArm64Imod32, node);
}


void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kArm64Imod, node);
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitRRR(this, kArm64Umod32, node);
}


void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitRRR(this, kArm64Umod, node);
}
1400
1401
// Numeric conversion visitors: each selects a single two-register conversion
// instruction.
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kArm64Float32ToFloat64, node);
}


void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kArm64Int32ToFloat32, node);
}


void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kArm64Uint32ToFloat32, node);
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kArm64Int32ToFloat64, node);
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kArm64Uint32ToFloat64, node);
}


void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kArm64Float32ToInt32, node);
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ToInt32, node);
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kArm64Float32ToUint32, node);
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kArm64Float64ToUint32, node);
}

// NOTE(review): same opcode as ChangeFloat64ToUint32 above — presumably any
// semantic difference between Change and Truncate is handled before/after
// selection; confirm against the opcode's code-generator implementation.
void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kArm64Float64ToUint32, node);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001449
1450void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001451 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001452
1453 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1454 InstructionOperand outputs[2];
1455 size_t output_count = 0;
1456 outputs[output_count++] = g.DefineAsRegister(node);
1457
1458 Node* success_output = NodeProperties::FindProjection(node, 1);
1459 if (success_output) {
1460 outputs[output_count++] = g.DefineAsRegister(success_output);
1461 }
1462
1463 Emit(kArm64Float32ToInt64, output_count, outputs, 1, inputs);
1464}
1465
1466
1467void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1468 Arm64OperandGenerator g(this);
1469
1470 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1471 InstructionOperand outputs[2];
1472 size_t output_count = 0;
1473 outputs[output_count++] = g.DefineAsRegister(node);
1474
1475 Node* success_output = NodeProperties::FindProjection(node, 1);
1476 if (success_output) {
1477 outputs[output_count++] = g.DefineAsRegister(success_output);
1478 }
1479
1480 Emit(kArm64Float64ToInt64, output_count, outputs, 1, inputs);
1481}
1482
1483
1484void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1485 Arm64OperandGenerator g(this);
1486
1487 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1488 InstructionOperand outputs[2];
1489 size_t output_count = 0;
1490 outputs[output_count++] = g.DefineAsRegister(node);
1491
1492 Node* success_output = NodeProperties::FindProjection(node, 1);
1493 if (success_output) {
1494 outputs[output_count++] = g.DefineAsRegister(success_output);
1495 }
1496
1497 Emit(kArm64Float32ToUint64, output_count, outputs, 1, inputs);
1498}
1499
1500
1501void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1502 Arm64OperandGenerator g(this);
1503
1504 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1505 InstructionOperand outputs[2];
1506 size_t output_count = 0;
1507 outputs[output_count++] = g.DefineAsRegister(node);
1508
1509 Node* success_output = NodeProperties::FindProjection(node, 1);
1510 if (success_output) {
1511 outputs[output_count++] = g.DefineAsRegister(success_output);
1512 }
1513
1514 Emit(kArm64Float64ToUint64, output_count, outputs, 1, inputs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001515}
1516
1517
// Sign-extends a 32-bit value to 64 bits (sxtw).
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  VisitRR(this, kArm64Sxtw, node);
}
1521
1522
// Lowers a uint32 -> uint64 zero-extension.  When the input is a 32-bit
// operation (or a narrow load), the upper 32 bits of the destination X
// register are already clear, so the extension is emitted as a nop;
// otherwise an explicit 32-bit move performs the zero-extension.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  Arm64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32AddWithOverflow:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32SubWithOverflow:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh: {
      // 32-bit operations will write their result in a W register (implicitly
      // clearing the top 32-bit of the corresponding X register) so the
      // zero-extension is a no-op.
      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      return;
    }
    case IrOpcode::kLoad: {
      // As for the operations above, a 32-bit load will implicitly clear the
      // top 32 bits of the destination register.
      LoadRepresentation load_rep = LoadRepresentationOf(value->op());
      switch (load_rep.representation()) {
        case MachineRepresentation::kWord8:
        case MachineRepresentation::kWord16:
        case MachineRepresentation::kWord32:
          Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
          return;
        default:
          break;
      }
      // Wider load representations intentionally fall through to the
      // explicit move below.
    }
    default:
      break;
  }
  // Could not prove the top bits are clear: emit a 32-bit register move,
  // which zero-extends into the X register.
  Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(value));
}
1575
1576
// Float64 to float32 narrowing.
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kArm64Float64ToFloat32, node);
}

// Float64 -> word32 truncation, lowered through the architecture-independent
// kArchTruncateDoubleToI opcode.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, kArchTruncateDoubleToI, node);
}

// Float64 to int32; uses the same instruction as VisitChangeFloat64ToInt32.
void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ToInt32, node);
}
1588
1589
1590void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1591 Arm64OperandGenerator g(this);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001592 Node* value = node->InputAt(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001593 if (CanCover(node, value) && value->InputCount() >= 2) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001594 Int64BinopMatcher m(value);
1595 if ((m.IsWord64Sar() && m.right().HasValue() &&
1596 (m.right().Value() == 32)) ||
1597 (m.IsWord64Shr() && m.right().IsInRange(32, 63))) {
1598 Emit(kArm64Lsr, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
1599 g.UseImmediate(m.right().node()));
1600 return;
1601 }
1602 }
1603
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001604 Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
1605}
1606
1607
// 64-bit integer to floating-point conversions.

void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kArm64Int64ToFloat32, node);
}

void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kArm64Int64ToFloat64, node);
}

void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kArm64Uint64ToFloat32, node);
}

void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kArm64Uint64ToFloat64, node);
}

// Bitcasts: reinterpret the bit pattern between register files without any
// numeric conversion.

void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ExtractLowWord32, node);
}

void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kArm64U64MoveFloat64, node);
}

void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kArm64Float64MoveU64, node);
}

void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kArm64Float64MoveU64, node);
}
1646
1647
// Floating-point add/sub visitors: plain three-register operations.

void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kArm64Float32Add, node);
}

void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitRRR(this, kArm64Float64Add, node);
}

void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kArm64Float32Sub, node);
}

// NaN-preserving variant: emits the same instruction as VisitFloat32Sub.
void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
  VisitRRR(this, kArm64Float32Sub, node);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001665
// Lowers float64 subtraction with two strength reductions for a -0.0 left
// operand: -0.0 - x becomes a negation, and the combined pattern
// -0.0 - RoundDown(-0.0 - x) becomes RoundUp(x) (since -floor(-x) == ceil(x)).
// The fused forms are only used when the intermediate nodes have no other
// uses (checked via CanCover).
void InstructionSelector::VisitFloat64Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          // -0.0 - RoundDown(-0.0 - x) => RoundUp(x).
          Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
               g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    // -0.0 - x => -x.
    Emit(kArm64Float64Neg, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  VisitRRR(this, kArm64Float64Sub, node);
}
1688
// NaN-preserving float64 subtraction: unlike VisitFloat64Sub, applies no
// pattern-based strength reductions.
void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
  VisitRRR(this, kArm64Float64Sub, node);
}

// Floating-point multiply/divide visitors: plain three-register operations.

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kArm64Float32Mul, node);
}

void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kArm64Float64Mul, node);
}

void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kArm64Float32Div, node);
}

void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kArm64Float64Div, node);
}
1711
1712
// Lowers float64 modulus as a call to an external routine, with the operands
// fixed in d0/d1 and the result returned in d0 (MarkAsCall tells the register
// allocator this instruction clobbers like a call).
void InstructionSelector::VisitFloat64Mod(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Float64Mod, g.DefineAsFixed(node, d0),
       g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
}
1719
1720
// Floating-point min/max: plain three-register operations.

void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRR(this, kArm64Float32Max, node);
}

void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRR(this, kArm64Float64Max, node);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRR(this, kArm64Float32Min, node);
}

void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRR(this, kArm64Float64Min, node);
}

// Floating-point unary operations: one register input, one register output.

void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kArm64Float32Abs, node);
}

void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kArm64Float64Abs, node);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kArm64Float32Sqrt, node);
}
1753
1754
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kArm64Float64Sqrt, node);
}

// Floating-point rounding visitors; each maps to the Arm64 instruction with
// the corresponding rounding mode (down = toward -inf, up = toward +inf,
// truncate = toward zero, ties-away / ties-even for ties handling).

void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kArm64Float32RoundDown, node);
}

void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kArm64Float64RoundDown, node);
}

void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kArm64Float32RoundUp, node);
}

void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kArm64Float64RoundUp, node);
}

void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kArm64Float32RoundTruncate, node);
}

void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kArm64Float64RoundTruncate, node);
}

void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kArm64Float64RoundTiesAway, node);
}

void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, kArm64Float32RoundTiesEven, node);
}

void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  VisitRR(this, kArm64Float64RoundTiesEven, node);
}

// Floating-point negation.

void InstructionSelector::VisitFloat32Neg(Node* node) {
  VisitRR(this, kArm64Float32Neg, node);
}

void InstructionSelector::VisitFloat64Neg(Node* node) {
  VisitRR(this, kArm64Float64Neg, node);
}
1811
// Lowers an IEEE-754 float64 binary operation (opcode chosen by the caller)
// as a call: inputs fixed in d0/d1, result in d0.
void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  Arm64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))
      ->MarkAsCall();
}

// Lowers an IEEE-754 float64 unary operation as a call: input and result
// both fixed in d0.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  Arm64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0))
      ->MarkAsCall();
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001826
// Pushes call arguments onto the stack before a call.  Arm64 maintains two
// stack pointers (the native CSP and the JavaScript JSSP); the claim/poke
// opcodes are selected by whether the callee uses the native stack, and a
// claim is forced whenever caller and callee disagree on which stack they
// use, even for zero arguments.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  Arm64OperandGenerator g(this);

  bool from_native_stack = linkage()->GetIncomingDescriptor()->UseNativeStack();
  bool to_native_stack = descriptor->UseNativeStack();

  // A stack-pointer mismatch must be reconciled even when no slots are
  // claimed.
  bool always_claim = to_native_stack != from_native_stack;

  int claim_count = static_cast<int>(arguments->size());
  int slot = claim_count - 1;
  // Bump the stack pointer(s).
  if (claim_count > 0 || always_claim) {
    // TODO(titzer): claim and poke probably take small immediates.
    // TODO(titzer): it would be better to bump the csp here only
    // and emit paired stores with increment for non c frames.
    ArchOpcode claim = to_native_stack ? kArm64ClaimCSP : kArm64ClaimJSSP;
    // Claim(0) isn't a nop if there is a mismatch between CSP and JSSP.
    Emit(claim, g.NoOutput(), g.TempImmediate(claim_count));
  }

  // Poke the arguments into the stack, highest slot first.
  ArchOpcode poke = to_native_stack ? kArm64PokeCSP : kArm64PokeJSSP;
  while (slot >= 0) {
    Emit(poke, g.NoOutput(), g.UseRegister((*arguments)[slot].node()),
         g.TempImmediate(slot));
    slot--;
    // TODO(ahaas): Poke arguments in pairs if two subsequent arguments have the
    // same type.
    // Emit(kArm64PokePair, g.NoOutput(), g.UseRegister((*arguments)[slot]),
    // g.UseRegister((*arguments)[slot - 1]), g.TempImmediate(slot));
    // slot -= 2;
  }
}
1862
1863
// Tail-call target addresses are never emitted as immediates on arm64.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

// Number of temporary registers reserved for a tail call out of a JSFunction.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001867
1868namespace {
1869
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001870// Shared routine for multiple compare operations.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001871void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1872 InstructionOperand left, InstructionOperand right,
1873 FlagsContinuation* cont) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001874 Arm64OperandGenerator g(selector);
1875 opcode = cont->Encode(opcode);
1876 if (cont->IsBranch()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001877 selector->Emit(opcode, g.NoOutput(), left, right,
1878 g.Label(cont->true_block()), g.Label(cont->false_block()));
Ben Murdochda12d292016-06-02 14:46:10 +01001879 } else if (cont->IsDeoptimize()) {
1880 selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
1881 cont->frame_state());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001882 } else {
1883 DCHECK(cont->IsSet());
1884 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001885 }
1886}
1887
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001888
1889// Shared routine for multiple word compare operations.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001890void VisitWordCompare(InstructionSelector* selector, Node* node,
1891 InstructionCode opcode, FlagsContinuation* cont,
1892 bool commutative, ImmediateMode immediate_mode) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001893 Arm64OperandGenerator g(selector);
1894 Node* left = node->InputAt(0);
1895 Node* right = node->InputAt(1);
1896
1897 // Match immediates on left or right side of comparison.
1898 if (g.CanBeImmediate(right, immediate_mode)) {
1899 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1900 cont);
1901 } else if (g.CanBeImmediate(left, immediate_mode)) {
1902 if (!commutative) cont->Commute();
1903 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1904 cont);
1905 } else {
1906 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1907 cont);
1908 }
1909}
1910
1911
// Lowers a 32-bit word comparison.  Comparisons against a negation
// (x cmp (0 - y)) are rewritten in place to use cmn (compare-negate), which
// requires carefully editing the graph; see the inline comments.
void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  Int32BinopMatcher m(node);
  ArchOpcode opcode = kArm64Cmp32;

  // Select negated compare for comparisons with negated right input.
  if (m.right().IsInt32Sub()) {
    Node* sub = m.right().node();
    Int32BinopMatcher msub(sub);
    if (msub.left().Is(0)) {
      bool can_cover = selector->CanCover(node, sub);
      node->ReplaceInput(1, msub.right().node());
      // Even if the comparison node covers the subtraction, after the input
      // replacement above, the node still won't cover the input to the
      // subtraction; the subtraction still uses it.
      // In order to get shifted operations to work, we must remove the rhs
      // input to the subtraction, as TryMatchAnyShift requires this node to
      // cover the input shift. We do this by setting it to the lhs input,
      // as we know it's zero, and the result of the subtraction isn't used by
      // any other node.
      if (can_cover) sub->ReplaceInput(1, msub.left().node());
      opcode = kArm64Cmn32;
    }
  }
  VisitBinop<Int32BinopMatcher>(selector, node, opcode, kArithmeticImm, cont);
}
1938
1939
// Shared routine: sets the flags by testing a value against itself, then
// consumes them via the continuation.
void VisitWordTest(InstructionSelector* selector, Node* node,
                   InstructionCode opcode, FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.UseRegister(node), g.UseRegister(node),
               cont);
}

// 32-bit self-test (tst32).
void VisitWord32Test(InstructionSelector* selector, Node* node,
                     FlagsContinuation* cont) {
  VisitWordTest(selector, node, kArm64Tst32, cont);
}

// 64-bit self-test (tst).
void VisitWord64Test(InstructionSelector* selector, Node* node,
                     FlagsContinuation* cont) {
  VisitWordTest(selector, node, kArm64Tst, cont);
}
1958
// Tries to lower a binary mask test feeding a branch into a single test-bit
// branch instruction (kOpcode, e.g. tbz/tbnz).  Succeeds only when the
// continuation is a branch and the right operand is a constant with exactly
// one bit set; returns false so the caller can fall back otherwise.
template <typename Matcher, ArchOpcode kOpcode>
bool TryEmitTestAndBranch(InstructionSelector* selector, Node* node,
                          FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  Matcher m(node);
  if (cont->IsBranch() && m.right().HasValue() &&
      (base::bits::CountPopulation(m.right().Value()) == 1)) {
    // If the mask has only one bit set, we can use tbz/tbnz.
    DCHECK((cont->condition() == kEqual) || (cont->condition() == kNotEqual));
    selector->Emit(
        cont->Encode(kOpcode), g.NoOutput(), g.UseRegister(m.left().node()),
        // The bit index is the position of the single set bit.
        g.TempImmediate(base::bits::CountTrailingZeros(m.right().Value())),
        g.Label(cont->true_block()), g.Label(cont->false_block()));
    return true;
  }
  return false;
}
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001976
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001977// Shared routine for multiple float32 compare operations.
1978void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1979 FlagsContinuation* cont) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001980 Arm64OperandGenerator g(selector);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001981 Float32BinopMatcher m(node);
1982 if (m.right().Is(0.0f)) {
1983 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
1984 g.UseImmediate(m.right().node()), cont);
1985 } else if (m.left().Is(0.0f)) {
1986 cont->Commute();
1987 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.right().node()),
1988 g.UseImmediate(m.left().node()), cont);
1989 } else {
1990 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
1991 g.UseRegister(m.right().node()), cont);
1992 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001993}
1994
1995
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001996// Shared routine for multiple float64 compare operations.
1997void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1998 FlagsContinuation* cont) {
1999 Arm64OperandGenerator g(selector);
2000 Float64BinopMatcher m(node);
2001 if (m.right().Is(0.0)) {
2002 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
2003 g.UseImmediate(m.right().node()), cont);
2004 } else if (m.left().Is(0.0)) {
2005 cont->Commute();
2006 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.right().node()),
2007 g.UseImmediate(m.left().node()), cont);
2008 } else {
2009 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
2010 g.UseRegister(m.right().node()), cont);
2011 }
2012}
2013
// Tries to fuse the computation of `value` into the flag-setting part of the
// continuation (branch or deoptimize).  Walks down through nodes the user
// covers, combining comparisons, overflow projections and single-bit tests;
// if nothing can be combined, falls back to a compare-against-zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  // Only look through `value` if this node is its sole effective user.
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord64Equal: {
        cont->OverwriteAndNegateIfEqual(kEqual);
        Int64BinopMatcher m(value);
        if (m.right().Is(0)) {
          Node* const left = m.left().node();
          if (selector->CanCover(value, left) &&
              left->opcode() == IrOpcode::kWord64And) {
            // Attempt to merge the Word64Equal(Word64And(x, y), 0) comparison
            // into a tbz/tbnz instruction.
            if (TryEmitTestAndBranch<Uint64BinopMatcher, kArm64TestAndBranch>(
                    selector, left, cont)) {
              return;
            }
            return VisitWordCompare(selector, left, kArm64Tst, cont, true,
                                    kLogical64Imm);
          }
          // Merge the Word64Equal(x, 0) comparison into a cbz instruction.
          if (cont->IsBranch()) {
            selector->Emit(cont->Encode(kArm64CompareAndBranch), g.NoOutput(),
                           g.UseRegister(left), g.Label(cont->true_block()),
                           g.Label(cont->false_block()));
            return;
          }
        }
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      }
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kArm64Add32, kArithmeticImm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kArm64Sub32, kArithmeticImm, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Add,
                                                     kArithmeticImm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Sub,
                                                     kArithmeticImm, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Add:
        return VisitWordCompare(selector, value, kArm64Cmn32, cont, true,
                                kArithmeticImm);
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And:
        // Prefer a single-bit test-and-branch when the mask allows it.
        if (TryEmitTestAndBranch<Uint32BinopMatcher, kArm64TestAndBranch32>(
                selector, value, cont)) {
          return;
        }
        return VisitWordCompare(selector, value, kArm64Tst32, cont, true,
                                kLogical32Imm);
      case IrOpcode::kWord64And:
        if (TryEmitTestAndBranch<Uint64BinopMatcher, kArm64TestAndBranch>(
                selector, value, cont)) {
          return;
        }
        return VisitWordCompare(selector, value, kArm64Tst, cont, true,
                                kLogical64Imm);
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, compare against 0 and branch.
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kArm64CompareAndBranch32), g.NoOutput(),
                   g.UseRegister(value), g.Label(cont->true_block()),
                   g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsDeoptimize());
    selector->EmitDeoptimize(cont->Encode(kArm64Tst32), g.NoOutput(),
                             g.UseRegister(value), g.UseRegister(value),
                             cont->frame_state());
  }
}
2177
Ben Murdochda12d292016-06-02 14:46:10 +01002178} // namespace
2179
2180void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
2181 BasicBlock* fbranch) {
2182 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
2183 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
2184}
2185
2186void InstructionSelector::VisitDeoptimizeIf(Node* node) {
2187 FlagsContinuation cont =
2188 FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
2189 VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2190}
2191
2192void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
2193 FlagsContinuation cont =
2194 FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
2195 VisitWordCompareZero(this, node, node->InputAt(0), &cont);
2196}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002197
2198void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
2199 Arm64OperandGenerator g(this);
2200 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
2201
2202 // Emit either ArchTableSwitch or ArchLookupSwitch.
2203 size_t table_space_cost = 4 + sw.value_range;
2204 size_t table_time_cost = 3;
2205 size_t lookup_space_cost = 3 + 2 * sw.case_count;
2206 size_t lookup_time_cost = sw.case_count;
2207 if (sw.case_count > 0 &&
2208 table_space_cost + 3 * table_time_cost <=
2209 lookup_space_cost + 3 * lookup_time_cost &&
2210 sw.min_value > std::numeric_limits<int32_t>::min()) {
2211 InstructionOperand index_operand = value_operand;
2212 if (sw.min_value) {
2213 index_operand = g.TempRegister();
2214 Emit(kArm64Sub32, index_operand, value_operand,
2215 g.TempImmediate(sw.min_value));
2216 }
2217 // Generate a table lookup.
2218 return EmitTableSwitch(sw, index_operand);
2219 }
2220
2221 // Generate a sequence of conditional jumps.
2222 return EmitLookupSwitch(sw, value_operand);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002223}
2224
2225
// Selects instructions for Word32Equal. A comparison against zero is fused
// with the instruction producing the compared value when possible (CMN for
// add, CMP for sub, TST for and), otherwise falls back to a plain 32-bit
// compare.
void InstructionSelector::VisitWord32Equal(Node* const node) {
  Node* const user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* const value = m.left().node();
    // Fusing is only valid if this node is the value's sole effective user.
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt32Add:
          // (x + y) == 0  =>  CMN x, y (commutative, hence 'true').
          return VisitWordCompare(this, value, kArm64Cmn32, &cont, true,
                                  kArithmeticImm);
        case IrOpcode::kInt32Sub:
          // (x - y) == 0  =>  CMP x, y (not commutative).
          return VisitWordCompare(this, value, kArm64Cmp32, &cont, false,
                                  kArithmeticImm);
        case IrOpcode::kWord32And:
          // (x & y) == 0  =>  TST x, y.
          return VisitWordCompare(this, value, kArm64Tst32, &cont, true,
                                  kLogical32Imm);
        case IrOpcode::kWord32Equal: {
          // Word32Equal(Word32Equal(x, y), 0) => Word32Compare(x, y, ne).
          // Note: this rewrites the inputs of 'node' in place before
          // negating the continuation's condition.
          Int32BinopMatcher mequal(value);
          node->ReplaceInput(0, mequal.left().node());
          node->ReplaceInput(1, mequal.right().node());
          cont.Negate();
          return VisitWord32Compare(this, node, &cont);
        }
        default:
          break;
      }
      // No fusable producer: test the value against zero directly.
      return VisitWord32Test(this, value, &cont);
    }
  }
  VisitWord32Compare(this, node, &cont);
}
2259
2260
2261void InstructionSelector::VisitInt32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002262 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002263 VisitWord32Compare(this, node, &cont);
2264}
2265
2266
2267void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002268 FlagsContinuation cont =
2269 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002270 VisitWord32Compare(this, node, &cont);
2271}
2272
2273
2274void InstructionSelector::VisitUint32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002275 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002276 VisitWord32Compare(this, node, &cont);
2277}
2278
2279
2280void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002281 FlagsContinuation cont =
2282 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002283 VisitWord32Compare(this, node, &cont);
2284}
2285
2286
2287void InstructionSelector::VisitWord64Equal(Node* const node) {
2288 Node* const user = node;
Ben Murdochda12d292016-06-02 14:46:10 +01002289 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002290 Int64BinopMatcher m(user);
2291 if (m.right().Is(0)) {
2292 Node* const value = m.left().node();
2293 if (CanCover(user, value)) {
2294 switch (value->opcode()) {
2295 case IrOpcode::kWord64And:
2296 return VisitWordCompare(this, value, kArm64Tst, &cont, true,
2297 kLogical64Imm);
2298 default:
2299 break;
2300 }
2301 return VisitWord64Test(this, value, &cont);
2302 }
2303 }
2304 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2305}
2306
2307
2308void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002309 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002310 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002311 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
2312 kArithmeticImm, &cont);
2313 }
2314 FlagsContinuation cont;
2315 VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, &cont);
2316}
2317
2318
2319void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002320 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002321 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002322 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
2323 kArithmeticImm, &cont);
2324 }
2325 FlagsContinuation cont;
2326 VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, &cont);
2327}
2328
2329
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002330void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
2331 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002332 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002333 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm,
2334 &cont);
2335 }
2336 FlagsContinuation cont;
2337 VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm, &cont);
2338}
2339
2340
2341void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
2342 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002343 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002344 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm,
2345 &cont);
2346 }
2347 FlagsContinuation cont;
2348 VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm, &cont);
2349}
2350
2351
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002352void InstructionSelector::VisitInt64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002353 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002354 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2355}
2356
2357
2358void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002359 FlagsContinuation cont =
2360 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002361 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2362}
2363
2364
2365void InstructionSelector::VisitUint64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002366 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002367 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2368}
2369
2370
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002371void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002372 FlagsContinuation cont =
2373 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002374 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2375}
2376
2377
2378void InstructionSelector::VisitFloat32Equal(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002379 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002380 VisitFloat32Compare(this, node, &cont);
2381}
2382
2383
2384void InstructionSelector::VisitFloat32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002385 FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002386 VisitFloat32Compare(this, node, &cont);
2387}
2388
2389
2390void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002391 FlagsContinuation cont =
2392 FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002393 VisitFloat32Compare(this, node, &cont);
2394}
2395
2396
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002397void InstructionSelector::VisitFloat64Equal(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002398 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002399 VisitFloat64Compare(this, node, &cont);
2400}
2401
2402
2403void InstructionSelector::VisitFloat64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002404 FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002405 VisitFloat64Compare(this, node, &cont);
2406}
2407
2408
2409void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01002410 FlagsContinuation cont =
2411 FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002412 VisitFloat64Compare(this, node, &cont);
2413}
2414
2415
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002416void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
2417 Arm64OperandGenerator g(this);
2418 Emit(kArm64Float64ExtractLowWord32, g.DefineAsRegister(node),
2419 g.UseRegister(node->InputAt(0)));
2420}
2421
2422
2423void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
2424 Arm64OperandGenerator g(this);
2425 Emit(kArm64Float64ExtractHighWord32, g.DefineAsRegister(node),
2426 g.UseRegister(node->InputAt(0)));
2427}
2428
2429
// Replaces the low 32 bits of the float64 input (input 0) with the word32
// input (input 1).
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  Arm64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  // Pattern Float64InsertLowWord32(Float64InsertHighWord32(_, hi), lo):
  // both halves are plain word32 values, so the double can be assembled in
  // a GP register with a single BFI and then moved to an FP register.  The
  // two immediates presumably encode lsb=32 and width=32 — confirm against
  // the code generator for kArm64Bfi.
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    Node* right_of_left = left->InputAt(1);
    // NOTE: the BFI result is constrained to reuse 'right's register
    // (DefineSameAsFirst); the subsequent move reads that same register.
    Emit(kArm64Bfi, g.DefineSameAsFirst(right), g.UseRegister(right),
         g.UseRegister(right_of_left), g.TempImmediate(32),
         g.TempImmediate(32));
    Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(right));
    return;
  }
  // General case: dedicated insert-low instruction.
  Emit(kArm64Float64InsertLowWord32, g.DefineAsRegister(node),
       g.UseRegister(left), g.UseRegister(right));
}
2446
2447
// Replaces the high 32 bits of the float64 input (input 0) with the word32
// input (input 1).
void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  Arm64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  // Pattern Float64InsertHighWord32(Float64InsertLowWord32(_, lo), hi):
  // assemble the double in a GP register with one BFI, then move it to an
  // FP register; mirrors VisitFloat64InsertLowWord32.  The two immediates
  // presumably encode lsb=32 and width=32 — confirm against the code
  // generator for kArm64Bfi.
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    Node* right_of_left = left->InputAt(1);
    // NOTE: the BFI result reuses 'left's operand register
    // (DefineSameAsFirst) and is then moved into the FP destination.
    Emit(kArm64Bfi, g.DefineSameAsFirst(left), g.UseRegister(right_of_left),
         g.UseRegister(right), g.TempImmediate(32), g.TempImmediate(32));
    Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(left));
    return;
  }
  // General case: dedicated insert-high instruction.
  Emit(kArm64Float64InsertHighWord32, g.DefineAsRegister(node),
       g.UseRegister(left), g.UseRegister(right));
}
2463
// Lowers Float64SilenceNaN through the generic two-register helper; the
// opcode presumably canonicalizes NaN payloads — see the kArm64Float64SilenceNaN
// implementation in the code generator for the exact semantics.
void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  VisitRR(this, kArm64Float64SilenceNaN, node);
}
2467
Ben Murdochc5610432016-08-08 18:44:38 +01002468void InstructionSelector::VisitAtomicLoad(Node* node) {
2469 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
2470 Arm64OperandGenerator g(this);
2471 Node* base = node->InputAt(0);
2472 Node* index = node->InputAt(1);
2473 ArchOpcode opcode = kArchNop;
2474 switch (load_rep.representation()) {
2475 case MachineRepresentation::kWord8:
2476 opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
2477 break;
2478 case MachineRepresentation::kWord16:
2479 opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
2480 break;
2481 case MachineRepresentation::kWord32:
2482 opcode = kAtomicLoadWord32;
2483 break;
2484 default:
2485 UNREACHABLE();
2486 return;
2487 }
2488 Emit(opcode | AddressingModeField::encode(kMode_MRR),
2489 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
2490}
2491
2492void InstructionSelector::VisitAtomicStore(Node* node) {
2493 MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
2494 Arm64OperandGenerator g(this);
2495 Node* base = node->InputAt(0);
2496 Node* index = node->InputAt(1);
2497 Node* value = node->InputAt(2);
2498 ArchOpcode opcode = kArchNop;
2499 switch (rep) {
2500 case MachineRepresentation::kWord8:
2501 opcode = kAtomicStoreWord8;
2502 break;
2503 case MachineRepresentation::kWord16:
2504 opcode = kAtomicStoreWord16;
2505 break;
2506 case MachineRepresentation::kWord32:
2507 opcode = kAtomicStoreWord32;
2508 break;
2509 default:
2510 UNREACHABLE();
2511 return;
2512 }
2513
2514 AddressingMode addressing_mode = kMode_MRR;
2515 InstructionOperand inputs[3];
2516 size_t input_count = 0;
2517 inputs[input_count++] = g.UseUniqueRegister(base);
2518 inputs[input_count++] = g.UseUniqueRegister(index);
2519 inputs[input_count++] = g.UseUniqueRegister(value);
2520 InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
2521 Emit(code, 0, nullptr, input_count, inputs);
2522}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002523
// static
// Advertises the optional machine operators this backend selects natively:
// float min/max, all rounding modes, safe 32-bit shifts and divisions, bit
// reversal, and float negation.
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32Max |
         MachineOperatorBuilder::kFloat32Min |
         MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64Max |
         MachineOperatorBuilder::kFloat64Min |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kFloat32RoundTiesEven |
         MachineOperatorBuilder::kFloat64RoundTiesEven |
         MachineOperatorBuilder::kWord32ShiftIsSafe |
         MachineOperatorBuilder::kInt32DivIsSafe |
         MachineOperatorBuilder::kUint32DivIsSafe |
         MachineOperatorBuilder::kWord32ReverseBits |
         MachineOperatorBuilder::kWord64ReverseBits |
         MachineOperatorBuilder::kFloat32Neg |
         MachineOperatorBuilder::kFloat64Neg;
}
2548
// static
// Reports full unaligned access support, so no alignment restrictions are
// imposed on loads/stores selected by this backend.
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002555
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002556} // namespace compiler
2557} // namespace internal
2558} // namespace v8