blob: d90deaeecbf940a90b87f192b82eead86864f8eb [file] [log] [blame]
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#include "src/compiler/instruction-selector-impl.h"
6#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/compiler/node-properties.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008
9namespace v8 {
10namespace internal {
11namespace compiler {
12
// Classes of immediate operand that ARM64 instructions can encode. Each
// value names the encoding constraint checked by
// Arm64OperandGenerator::CanBeImmediate.
enum ImmediateMode {
  kArithmeticImm,  // 12 bit unsigned immediate shifted left 0 or 12 bits
  kShift32Imm,     // 0 - 31
  kShift64Imm,     // 0 - 63
  kLogical32Imm,
  kLogical64Imm,
  kLoadStoreImm8,  // signed 8 bit or 12 bit unsigned scaled by access size
  kLoadStoreImm16,
  kLoadStoreImm32,
  kLoadStoreImm64,
  kNoImmediate
};
25
26
27// Adds Arm64-specific methods for generating operands.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000028class Arm64OperandGenerator final : public OperandGenerator {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000029 public:
30 explicit Arm64OperandGenerator(InstructionSelector* selector)
31 : OperandGenerator(selector) {}
32
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000033 InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000034 if (CanBeImmediate(node, mode)) {
35 return UseImmediate(node);
36 }
37 return UseRegister(node);
38 }
39
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000040 // Use the zero register if the node has the immediate value zero, otherwise
41 // assign a register.
42 InstructionOperand UseRegisterOrImmediateZero(Node* node) {
43 if (IsIntegerConstant(node) && (GetIntegerConstantValue(node) == 0)) {
44 return UseImmediate(node);
45 }
46 return UseRegister(node);
47 }
48
49 // Use the provided node if it has the required value, or create a
50 // TempImmediate otherwise.
51 InstructionOperand UseImmediateOrTemp(Node* node, int32_t value) {
52 if (GetIntegerConstantValue(node) == value) {
53 return UseImmediate(node);
54 }
55 return TempImmediate(value);
56 }
57
58 bool IsIntegerConstant(Node* node) {
59 return (node->opcode() == IrOpcode::kInt32Constant) ||
60 (node->opcode() == IrOpcode::kInt64Constant);
61 }
62
63 int64_t GetIntegerConstantValue(Node* node) {
64 if (node->opcode() == IrOpcode::kInt32Constant) {
65 return OpParameter<int32_t>(node);
66 }
67 DCHECK(node->opcode() == IrOpcode::kInt64Constant);
68 return OpParameter<int64_t>(node);
69 }
70
Ben Murdochb8a8cc12014-11-26 15:28:44 +000071 bool CanBeImmediate(Node* node, ImmediateMode mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000072 return IsIntegerConstant(node) &&
73 CanBeImmediate(GetIntegerConstantValue(node), mode);
Emily Bernierd0a1eb72015-03-24 16:35:39 -040074 }
75
76 bool CanBeImmediate(int64_t value, ImmediateMode mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000077 unsigned ignored;
78 switch (mode) {
79 case kLogical32Imm:
80 // TODO(dcarney): some unencodable values can be handled by
81 // switching instructions.
82 return Assembler::IsImmLogical(static_cast<uint64_t>(value), 32,
83 &ignored, &ignored, &ignored);
84 case kLogical64Imm:
85 return Assembler::IsImmLogical(static_cast<uint64_t>(value), 64,
86 &ignored, &ignored, &ignored);
87 case kArithmeticImm:
Ben Murdochb8a8cc12014-11-26 15:28:44 +000088 return Assembler::IsImmAddSub(value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000089 case kLoadStoreImm8:
90 return IsLoadStoreImmediate(value, LSByte);
91 case kLoadStoreImm16:
92 return IsLoadStoreImmediate(value, LSHalfword);
93 case kLoadStoreImm32:
94 return IsLoadStoreImmediate(value, LSWord);
95 case kLoadStoreImm64:
96 return IsLoadStoreImmediate(value, LSDoubleWord);
97 case kNoImmediate:
98 return false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000099 case kShift32Imm: // Fall through.
100 case kShift64Imm:
101 // Shift operations only observe the bottom 5 or 6 bits of the value.
102 // All possible shifts can be encoded by discarding bits which have no
103 // effect.
104 return true;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000105 }
106 return false;
107 }
108
109 private:
110 bool IsLoadStoreImmediate(int64_t value, LSDataSize size) {
111 return Assembler::IsImmLSScaled(value, size) ||
112 Assembler::IsImmLSUnscaled(value);
113 }
114};
115
116
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000117namespace {
118
119void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400120 Arm64OperandGenerator g(selector);
121 selector->Emit(opcode, g.DefineAsRegister(node),
122 g.UseRegister(node->InputAt(0)));
123}
124
125
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000126void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000127 Arm64OperandGenerator g(selector);
128 selector->Emit(opcode, g.DefineAsRegister(node),
129 g.UseRegister(node->InputAt(0)),
130 g.UseRegister(node->InputAt(1)));
131}
132
133
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000134void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
135 ImmediateMode operand_mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000136 Arm64OperandGenerator g(selector);
137 selector->Emit(opcode, g.DefineAsRegister(node),
138 g.UseRegister(node->InputAt(0)),
139 g.UseOperand(node->InputAt(1), operand_mode));
140}
141
142
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000143bool TryMatchAnyShift(InstructionSelector* selector, Node* node,
144 Node* input_node, InstructionCode* opcode, bool try_ror) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400145 Arm64OperandGenerator g(selector);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000146
147 if (!selector->CanCover(node, input_node)) return false;
148 if (input_node->InputCount() != 2) return false;
149 if (!g.IsIntegerConstant(input_node->InputAt(1))) return false;
150
151 switch (input_node->opcode()) {
152 case IrOpcode::kWord32Shl:
153 case IrOpcode::kWord64Shl:
154 *opcode |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
155 return true;
156 case IrOpcode::kWord32Shr:
157 case IrOpcode::kWord64Shr:
158 *opcode |= AddressingModeField::encode(kMode_Operand2_R_LSR_I);
159 return true;
160 case IrOpcode::kWord32Sar:
161 case IrOpcode::kWord64Sar:
162 *opcode |= AddressingModeField::encode(kMode_Operand2_R_ASR_I);
163 return true;
164 case IrOpcode::kWord32Ror:
165 case IrOpcode::kWord64Ror:
166 if (try_ror) {
167 *opcode |= AddressingModeField::encode(kMode_Operand2_R_ROR_I);
168 return true;
169 }
170 return false;
171 default:
172 return false;
173 }
174}
175
176
177bool TryMatchAnyExtend(Arm64OperandGenerator* g, InstructionSelector* selector,
178 Node* node, Node* left_node, Node* right_node,
179 InstructionOperand* left_op,
180 InstructionOperand* right_op, InstructionCode* opcode) {
181 if (!selector->CanCover(node, right_node)) return false;
182
183 NodeMatcher nm(right_node);
184
185 if (nm.IsWord32And()) {
186 Int32BinopMatcher mright(right_node);
187 if (mright.right().Is(0xff) || mright.right().Is(0xffff)) {
188 int32_t mask = mright.right().Value();
189 *left_op = g->UseRegister(left_node);
190 *right_op = g->UseRegister(mright.left().node());
191 *opcode |= AddressingModeField::encode(
192 (mask == 0xff) ? kMode_Operand2_R_UXTB : kMode_Operand2_R_UXTH);
193 return true;
194 }
195 } else if (nm.IsWord32Sar()) {
196 Int32BinopMatcher mright(right_node);
197 if (selector->CanCover(mright.node(), mright.left().node()) &&
198 mright.left().IsWord32Shl()) {
199 Int32BinopMatcher mleft_of_right(mright.left().node());
200 if ((mright.right().Is(16) && mleft_of_right.right().Is(16)) ||
201 (mright.right().Is(24) && mleft_of_right.right().Is(24))) {
202 int32_t shift = mright.right().Value();
203 *left_op = g->UseRegister(left_node);
204 *right_op = g->UseRegister(mleft_of_right.left().node());
205 *opcode |= AddressingModeField::encode(
206 (shift == 24) ? kMode_Operand2_R_SXTB : kMode_Operand2_R_SXTH);
207 return true;
208 }
209 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400210 }
211 return false;
212}
213
214
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000215// Shared routine for multiple binary operations.
216template <typename Matcher>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000217void VisitBinop(InstructionSelector* selector, Node* node,
218 InstructionCode opcode, ImmediateMode operand_mode,
219 FlagsContinuation* cont) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000220 Arm64OperandGenerator g(selector);
221 Matcher m(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000222 InstructionOperand inputs[5];
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000223 size_t input_count = 0;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000224 InstructionOperand outputs[2];
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000225 size_t output_count = 0;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000226 bool is_cmp = (opcode == kArm64Cmp32) || (opcode == kArm64Cmn32);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000227
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000228 // We can commute cmp by switching the inputs and commuting the flags
229 // continuation.
230 bool can_commute = m.HasProperty(Operator::kCommutative) || is_cmp;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400231
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000232 // The cmp and cmn instructions are encoded as sub or add with zero output
233 // register, and therefore support the same operand modes.
234 bool is_add_sub = m.IsInt32Add() || m.IsInt64Add() || m.IsInt32Sub() ||
235 m.IsInt64Sub() || is_cmp;
236
237 Node* left_node = m.left().node();
238 Node* right_node = m.right().node();
239
240 if (g.CanBeImmediate(right_node, operand_mode)) {
241 inputs[input_count++] = g.UseRegister(left_node);
242 inputs[input_count++] = g.UseImmediate(right_node);
243 } else if (is_cmp && g.CanBeImmediate(left_node, operand_mode)) {
244 cont->Commute();
245 inputs[input_count++] = g.UseRegister(right_node);
246 inputs[input_count++] = g.UseImmediate(left_node);
247 } else if (is_add_sub &&
248 TryMatchAnyExtend(&g, selector, node, left_node, right_node,
249 &inputs[0], &inputs[1], &opcode)) {
250 input_count += 2;
251 } else if (is_add_sub && can_commute &&
252 TryMatchAnyExtend(&g, selector, node, right_node, left_node,
253 &inputs[0], &inputs[1], &opcode)) {
254 if (is_cmp) cont->Commute();
255 input_count += 2;
256 } else if (TryMatchAnyShift(selector, node, right_node, &opcode,
257 !is_add_sub)) {
258 Matcher m_shift(right_node);
259 inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400260 inputs[input_count++] = g.UseRegister(m_shift.left().node());
261 inputs[input_count++] = g.UseImmediate(m_shift.right().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000262 } else if (can_commute && TryMatchAnyShift(selector, node, left_node, &opcode,
263 !is_add_sub)) {
264 if (is_cmp) cont->Commute();
265 Matcher m_shift(left_node);
266 inputs[input_count++] = g.UseRegisterOrImmediateZero(right_node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400267 inputs[input_count++] = g.UseRegister(m_shift.left().node());
268 inputs[input_count++] = g.UseImmediate(m_shift.right().node());
269 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000270 inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
271 inputs[input_count++] = g.UseRegister(right_node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400272 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000273
274 if (cont->IsBranch()) {
275 inputs[input_count++] = g.Label(cont->true_block());
276 inputs[input_count++] = g.Label(cont->false_block());
277 }
278
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000279 if (!is_cmp) {
280 outputs[output_count++] = g.DefineAsRegister(node);
281 }
282
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000283 if (cont->IsSet()) {
284 outputs[output_count++] = g.DefineAsRegister(cont->result());
285 }
286
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000287 DCHECK_NE(0u, input_count);
288 DCHECK((output_count != 0) || is_cmp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000289 DCHECK_GE(arraysize(inputs), input_count);
290 DCHECK_GE(arraysize(outputs), output_count);
291
Ben Murdochda12d292016-06-02 14:46:10 +0100292 opcode = cont->Encode(opcode);
293 if (cont->IsDeoptimize()) {
294 selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
295 cont->frame_state());
296 } else {
297 selector->Emit(opcode, output_count, outputs, input_count, inputs);
298 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000299}
300
301
302// Shared routine for multiple binary operations.
303template <typename Matcher>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000304void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
305 ImmediateMode operand_mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000306 FlagsContinuation cont;
307 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
308}
309
310
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400311template <typename Matcher>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000312void VisitAddSub(InstructionSelector* selector, Node* node, ArchOpcode opcode,
313 ArchOpcode negate_opcode) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400314 Arm64OperandGenerator g(selector);
315 Matcher m(node);
316 if (m.right().HasValue() && (m.right().Value() < 0) &&
317 g.CanBeImmediate(-m.right().Value(), kArithmeticImm)) {
318 selector->Emit(negate_opcode, g.DefineAsRegister(node),
319 g.UseRegister(m.left().node()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000320 g.TempImmediate(static_cast<int32_t>(-m.right().Value())));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400321 } else {
322 VisitBinop<Matcher>(selector, node, opcode, kArithmeticImm);
323 }
324}
325
326
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000327// For multiplications by immediate of the form x * (2^k + 1), where k > 0,
328// return the value of k, otherwise return zero. This is used to reduce the
329// multiplication to addition with left shift: x + (x << k).
330template <typename Matcher>
331int32_t LeftShiftForReducedMultiply(Matcher* m) {
332 DCHECK(m->IsInt32Mul() || m->IsInt64Mul());
333 if (m->right().HasValue() && m->right().Value() >= 3) {
334 uint64_t value_minus_one = m->right().Value() - 1;
335 if (base::bits::IsPowerOfTwo64(value_minus_one)) {
336 return WhichPowerOf2_64(value_minus_one);
337 }
338 }
339 return 0;
340}
341
342} // namespace
343
344
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000345void InstructionSelector::VisitLoad(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000346 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000347 Arm64OperandGenerator g(this);
348 Node* base = node->InputAt(0);
349 Node* index = node->InputAt(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000350 ArchOpcode opcode = kArchNop;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000351 ImmediateMode immediate_mode = kNoImmediate;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000352 switch (load_rep.representation()) {
353 case MachineRepresentation::kFloat32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000354 opcode = kArm64LdrS;
355 immediate_mode = kLoadStoreImm32;
356 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000357 case MachineRepresentation::kFloat64:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000358 opcode = kArm64LdrD;
359 immediate_mode = kLoadStoreImm64;
360 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000361 case MachineRepresentation::kBit: // Fall through.
362 case MachineRepresentation::kWord8:
363 opcode = load_rep.IsSigned() ? kArm64Ldrsb : kArm64Ldrb;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000364 immediate_mode = kLoadStoreImm8;
365 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000366 case MachineRepresentation::kWord16:
367 opcode = load_rep.IsSigned() ? kArm64Ldrsh : kArm64Ldrh;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000368 immediate_mode = kLoadStoreImm16;
369 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000370 case MachineRepresentation::kWord32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000371 opcode = kArm64LdrW;
372 immediate_mode = kLoadStoreImm32;
373 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000374 case MachineRepresentation::kTagged: // Fall through.
375 case MachineRepresentation::kWord64:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000376 opcode = kArm64Ldr;
377 immediate_mode = kLoadStoreImm64;
378 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100379 case MachineRepresentation::kSimd128: // Fall through.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000380 case MachineRepresentation::kNone:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000381 UNREACHABLE();
382 return;
383 }
384 if (g.CanBeImmediate(index, immediate_mode)) {
385 Emit(opcode | AddressingModeField::encode(kMode_MRI),
386 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
387 } else {
388 Emit(opcode | AddressingModeField::encode(kMode_MRR),
389 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
390 }
391}
392
393
394void InstructionSelector::VisitStore(Node* node) {
395 Arm64OperandGenerator g(this);
396 Node* base = node->InputAt(0);
397 Node* index = node->InputAt(1);
398 Node* value = node->InputAt(2);
399
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000400 StoreRepresentation store_rep = StoreRepresentationOf(node->op());
401 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
402 MachineRepresentation rep = store_rep.representation();
403
404 // TODO(arm64): I guess this could be done in a better way.
405 if (write_barrier_kind != kNoWriteBarrier) {
406 DCHECK_EQ(MachineRepresentation::kTagged, rep);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100407 AddressingMode addressing_mode;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000408 InstructionOperand inputs[3];
409 size_t input_count = 0;
410 inputs[input_count++] = g.UseUniqueRegister(base);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100411 // OutOfLineRecordWrite uses the index in an arithmetic instruction, so we
412 // must check kArithmeticImm as well as kLoadStoreImm64.
413 if (g.CanBeImmediate(index, kArithmeticImm) &&
414 g.CanBeImmediate(index, kLoadStoreImm64)) {
415 inputs[input_count++] = g.UseImmediate(index);
416 addressing_mode = kMode_MRI;
417 } else {
418 inputs[input_count++] = g.UseUniqueRegister(index);
419 addressing_mode = kMode_MRR;
420 }
Ben Murdochda12d292016-06-02 14:46:10 +0100421 inputs[input_count++] = g.UseUniqueRegister(value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000422 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
423 switch (write_barrier_kind) {
424 case kNoWriteBarrier:
425 UNREACHABLE();
426 break;
427 case kMapWriteBarrier:
428 record_write_mode = RecordWriteMode::kValueIsMap;
429 break;
430 case kPointerWriteBarrier:
431 record_write_mode = RecordWriteMode::kValueIsPointer;
432 break;
433 case kFullWriteBarrier:
434 record_write_mode = RecordWriteMode::kValueIsAny;
435 break;
436 }
437 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
438 size_t const temp_count = arraysize(temps);
439 InstructionCode code = kArchStoreWithWriteBarrier;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100440 code |= AddressingModeField::encode(addressing_mode);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000441 code |= MiscField::encode(static_cast<int>(record_write_mode));
442 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000443 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000444 ArchOpcode opcode = kArchNop;
445 ImmediateMode immediate_mode = kNoImmediate;
446 switch (rep) {
447 case MachineRepresentation::kFloat32:
448 opcode = kArm64StrS;
449 immediate_mode = kLoadStoreImm32;
450 break;
451 case MachineRepresentation::kFloat64:
452 opcode = kArm64StrD;
453 immediate_mode = kLoadStoreImm64;
454 break;
455 case MachineRepresentation::kBit: // Fall through.
456 case MachineRepresentation::kWord8:
457 opcode = kArm64Strb;
458 immediate_mode = kLoadStoreImm8;
459 break;
460 case MachineRepresentation::kWord16:
461 opcode = kArm64Strh;
462 immediate_mode = kLoadStoreImm16;
463 break;
464 case MachineRepresentation::kWord32:
465 opcode = kArm64StrW;
466 immediate_mode = kLoadStoreImm32;
467 break;
468 case MachineRepresentation::kTagged: // Fall through.
469 case MachineRepresentation::kWord64:
470 opcode = kArm64Str;
471 immediate_mode = kLoadStoreImm64;
472 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100473 case MachineRepresentation::kSimd128: // Fall through.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000474 case MachineRepresentation::kNone:
475 UNREACHABLE();
476 return;
477 }
478 if (g.CanBeImmediate(index, immediate_mode)) {
479 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
480 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
481 } else {
482 Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
483 g.UseRegister(base), g.UseRegister(index), g.UseRegister(value));
484 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000485 }
486}
487
488
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400489void InstructionSelector::VisitCheckedLoad(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000490 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400491 Arm64OperandGenerator g(this);
492 Node* const buffer = node->InputAt(0);
493 Node* const offset = node->InputAt(1);
494 Node* const length = node->InputAt(2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000495 ArchOpcode opcode = kArchNop;
496 switch (load_rep.representation()) {
497 case MachineRepresentation::kWord8:
498 opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400499 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000500 case MachineRepresentation::kWord16:
501 opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400502 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000503 case MachineRepresentation::kWord32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400504 opcode = kCheckedLoadWord32;
505 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000506 case MachineRepresentation::kWord64:
507 opcode = kCheckedLoadWord64;
508 break;
509 case MachineRepresentation::kFloat32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400510 opcode = kCheckedLoadFloat32;
511 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000512 case MachineRepresentation::kFloat64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400513 opcode = kCheckedLoadFloat64;
514 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100515 case MachineRepresentation::kBit: // Fall through.
516 case MachineRepresentation::kTagged: // Fall through.
517 case MachineRepresentation::kSimd128: // Fall through.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000518 case MachineRepresentation::kNone:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400519 UNREACHABLE();
520 return;
521 }
522 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
523 g.UseRegister(offset), g.UseOperand(length, kArithmeticImm));
524}
525
526
527void InstructionSelector::VisitCheckedStore(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000528 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400529 Arm64OperandGenerator g(this);
530 Node* const buffer = node->InputAt(0);
531 Node* const offset = node->InputAt(1);
532 Node* const length = node->InputAt(2);
533 Node* const value = node->InputAt(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000534 ArchOpcode opcode = kArchNop;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400535 switch (rep) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000536 case MachineRepresentation::kWord8:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400537 opcode = kCheckedStoreWord8;
538 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000539 case MachineRepresentation::kWord16:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400540 opcode = kCheckedStoreWord16;
541 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000542 case MachineRepresentation::kWord32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400543 opcode = kCheckedStoreWord32;
544 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000545 case MachineRepresentation::kWord64:
546 opcode = kCheckedStoreWord64;
547 break;
548 case MachineRepresentation::kFloat32:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400549 opcode = kCheckedStoreFloat32;
550 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000551 case MachineRepresentation::kFloat64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400552 opcode = kCheckedStoreFloat64;
553 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100554 case MachineRepresentation::kBit: // Fall through.
555 case MachineRepresentation::kTagged: // Fall through.
556 case MachineRepresentation::kSimd128: // Fall through.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000557 case MachineRepresentation::kNone:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400558 UNREACHABLE();
559 return;
560 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000561 Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400562 g.UseOperand(length, kArithmeticImm), g.UseRegister(value));
563}
564
565
566template <typename Matcher>
567static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
568 ArchOpcode opcode, bool left_can_cover,
569 bool right_can_cover, ImmediateMode imm_mode) {
570 Arm64OperandGenerator g(selector);
571
572 // Map instruction to equivalent operation with inverted right input.
573 ArchOpcode inv_opcode = opcode;
574 switch (opcode) {
575 case kArm64And32:
576 inv_opcode = kArm64Bic32;
577 break;
578 case kArm64And:
579 inv_opcode = kArm64Bic;
580 break;
581 case kArm64Or32:
582 inv_opcode = kArm64Orn32;
583 break;
584 case kArm64Or:
585 inv_opcode = kArm64Orn;
586 break;
587 case kArm64Eor32:
588 inv_opcode = kArm64Eon32;
589 break;
590 case kArm64Eor:
591 inv_opcode = kArm64Eon;
592 break;
593 default:
594 UNREACHABLE();
595 }
596
597 // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
598 if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
599 Matcher mleft(m->left().node());
600 if (mleft.right().Is(-1)) {
601 // TODO(all): support shifted operand on right.
602 selector->Emit(inv_opcode, g.DefineAsRegister(node),
603 g.UseRegister(m->right().node()),
604 g.UseRegister(mleft.left().node()));
605 return;
606 }
607 }
608
609 // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
610 if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
611 right_can_cover) {
612 Matcher mright(m->right().node());
613 if (mright.right().Is(-1)) {
614 // TODO(all): support shifted operand on right.
615 selector->Emit(inv_opcode, g.DefineAsRegister(node),
616 g.UseRegister(m->left().node()),
617 g.UseRegister(mright.left().node()));
618 return;
619 }
620 }
621
622 if (m->IsWord32Xor() && m->right().Is(-1)) {
623 selector->Emit(kArm64Not32, g.DefineAsRegister(node),
624 g.UseRegister(m->left().node()));
625 } else if (m->IsWord64Xor() && m->right().Is(-1)) {
626 selector->Emit(kArm64Not, g.DefineAsRegister(node),
627 g.UseRegister(m->left().node()));
628 } else {
629 VisitBinop<Matcher>(selector, node, opcode, imm_mode);
630 }
631}
632
633
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000634void InstructionSelector::VisitWord32And(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400635 Arm64OperandGenerator g(this);
636 Int32BinopMatcher m(node);
637 if (m.left().IsWord32Shr() && CanCover(node, m.left().node()) &&
638 m.right().HasValue()) {
639 uint32_t mask = m.right().Value();
640 uint32_t mask_width = base::bits::CountPopulation32(mask);
641 uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
642 if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
643 // The mask must be contiguous, and occupy the least-significant bits.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000644 DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400645
646 // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
647 // significant bits.
648 Int32BinopMatcher mleft(m.left().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000649 if (mleft.right().HasValue()) {
650 // Any shift value can match; int32 shifts use `value % 32`.
651 uint32_t lsb = mleft.right().Value() & 0x1f;
652
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400653 // Ubfx cannot extract bits past the register size, however since
654 // shifting the original value would have introduced some zeros we can
655 // still use ubfx with a smaller mask and the remaining bits will be
656 // zeros.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400657 if (lsb + mask_width > 32) mask_width = 32 - lsb;
658
659 Emit(kArm64Ubfx32, g.DefineAsRegister(node),
660 g.UseRegister(mleft.left().node()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000661 g.UseImmediateOrTemp(mleft.right().node(), lsb),
662 g.TempImmediate(mask_width));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400663 return;
664 }
665 // Other cases fall through to the normal And operation.
666 }
667 }
668 VisitLogical<Int32BinopMatcher>(
669 this, node, &m, kArm64And32, CanCover(node, m.left().node()),
670 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000671}
672
673
674void InstructionSelector::VisitWord64And(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400675 Arm64OperandGenerator g(this);
676 Int64BinopMatcher m(node);
677 if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) &&
678 m.right().HasValue()) {
679 uint64_t mask = m.right().Value();
680 uint64_t mask_width = base::bits::CountPopulation64(mask);
681 uint64_t mask_msb = base::bits::CountLeadingZeros64(mask);
682 if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
683 // The mask must be contiguous, and occupy the least-significant bits.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000684 DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400685
686 // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
687 // significant bits.
688 Int64BinopMatcher mleft(m.left().node());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000689 if (mleft.right().HasValue()) {
690 // Any shift value can match; int64 shifts use `value % 64`.
691 uint32_t lsb = static_cast<uint32_t>(mleft.right().Value() & 0x3f);
692
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400693 // Ubfx cannot extract bits past the register size, however since
694 // shifting the original value would have introduced some zeros we can
695 // still use ubfx with a smaller mask and the remaining bits will be
696 // zeros.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400697 if (lsb + mask_width > 64) mask_width = 64 - lsb;
698
699 Emit(kArm64Ubfx, g.DefineAsRegister(node),
700 g.UseRegister(mleft.left().node()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000701 g.UseImmediateOrTemp(mleft.right().node(), lsb),
702 g.TempImmediate(static_cast<int32_t>(mask_width)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400703 return;
704 }
705 // Other cases fall through to the normal And operation.
706 }
707 }
708 VisitLogical<Int64BinopMatcher>(
709 this, node, &m, kArm64And, CanCover(node, m.left().node()),
710 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000711}
712
713
714void InstructionSelector::VisitWord32Or(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400715 Int32BinopMatcher m(node);
716 VisitLogical<Int32BinopMatcher>(
717 this, node, &m, kArm64Or32, CanCover(node, m.left().node()),
718 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000719}
720
721
722void InstructionSelector::VisitWord64Or(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400723 Int64BinopMatcher m(node);
724 VisitLogical<Int64BinopMatcher>(
725 this, node, &m, kArm64Or, CanCover(node, m.left().node()),
726 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000727}
728
729
730void InstructionSelector::VisitWord32Xor(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000731 Int32BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400732 VisitLogical<Int32BinopMatcher>(
733 this, node, &m, kArm64Eor32, CanCover(node, m.left().node()),
734 CanCover(node, m.right().node()), kLogical32Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000735}
736
737
738void InstructionSelector::VisitWord64Xor(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000739 Int64BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400740 VisitLogical<Int64BinopMatcher>(
741 this, node, &m, kArm64Eor, CanCover(node, m.left().node()),
742 CanCover(node, m.right().node()), kLogical64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000743}
744
745
// Selects the instruction for a 32-bit left shift. Recognizes the pattern
// Shl(And(x, mask), imm) with a covered And and a contiguous low mask, so the
// mask can be folded into a single Lsl or Ubfiz instead of And + shift.
void InstructionSelector::VisitWord32Shl(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && CanCover(node, m.left().node()) &&
      m.right().IsInRange(1, 31)) {
    Arm64OperandGenerator g(this);
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      uint32_t mask = mleft.right().Value();
      // leading-zeros + popcount == 32 holds exactly when the set bits form
      // one contiguous run anchored at bit 0 (i.e. mask == 2^width - 1).
      uint32_t mask_width = base::bits::CountPopulation32(mask);
      uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
        uint32_t shift = m.right().Value();
        DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));
        DCHECK_NE(0u, shift);

        if ((shift + mask_width) >= 32) {
          // If the mask is contiguous and reaches or extends beyond the top
          // bit, only the shift is needed.
          Emit(kArm64Lsl32, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()));
          return;
        } else {
          // Select Ubfiz for Shl(And(x, mask), imm) where the mask is
          // contiguous, and the shift immediate non-zero.
          Emit(kArm64Ubfiz32, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()), g.TempImmediate(mask_width));
          return;
        }
      }
    }
  }
  // No fusable And on the left: emit a plain 32-bit shift.
  VisitRRO(this, kArm64Lsl32, node, kShift32Imm);
}
781
782
783void InstructionSelector::VisitWord64Shl(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400784 Arm64OperandGenerator g(this);
785 Int64BinopMatcher m(node);
786 if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
787 m.right().IsInRange(32, 63)) {
788 // There's no need to sign/zero-extend to 64-bit if we shift out the upper
789 // 32 bits anyway.
790 Emit(kArm64Lsl, g.DefineAsRegister(node),
791 g.UseRegister(m.left().node()->InputAt(0)),
792 g.UseImmediate(m.right().node()));
793 return;
794 }
795 VisitRRO(this, kArm64Lsl, node, kShift64Imm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000796}
797
798
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000799namespace {
800
// Tries to lower ((x << K) >> K) — logical or arithmetic right shift by the
// same 5-bit amount K — to a single Ubfx/Sbfx extract of the low (32 - K)
// bits of x. Returns true iff an instruction was emitted.
bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) {
  Arm64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    // Select Ubfx or Sbfx for (x << (K & 0x1f)) OP (K & 0x1f), where
    // OP is >>> or >> and (K & 0x1f) != 0.
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue() && m.right().HasValue() &&
        (mleft.right().Value() & 0x1f) == (m.right().Value() & 0x1f)) {
      DCHECK(m.IsWord32Shr() || m.IsWord32Sar());
      // Sar sign-extends, so it needs the signed extract (Sbfx).
      ArchOpcode opcode = m.IsWord32Sar() ? kArm64Sbfx32 : kArm64Ubfx32;

      int right_val = m.right().Value() & 0x1f;
      DCHECK_NE(right_val, 0);

      // Extract (32 - K) bits starting at bit 0.
      selector->Emit(opcode, g.DefineAsRegister(node),
                     g.UseRegister(mleft.left().node()), g.TempImmediate(0),
                     g.TempImmediate(32 - right_val));
      return true;
    }
  }
  return false;
}
824
825} // namespace
826
827
// Selects the instruction for a 32-bit logical right shift. Tries, in order:
// folding Shr(And(x, mask), imm) into Ubfx, the shared shift-shift bitfield
// extract, fusing a Uint32MulHigh operand into Umull+Lsr, and finally a
// plain Lsr.
void InstructionSelector::VisitWord32Shr(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x1f;
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      // The low lsb bits of the mask are cleared first; they are shifted out
      // by the Shr anyway.
      uint32_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation32(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros32(mask);
      // Holds exactly when the remaining set bits form one contiguous run
      // starting at bit lsb.
      if ((mask_msb + mask_width + lsb) == 32) {
        Arm64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask));
        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(m.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  } else if (TryEmitBitfieldExtract32(this, node)) {
    return;
  }

  if (m.left().IsUint32MulHigh() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    // Combine this shift with the multiply and shift that would be generated
    // by Uint32MulHigh.
    Arm64OperandGenerator g(this);
    Node* left = m.left().node();
    int shift = m.right().Value() & 0x1f;
    InstructionOperand const smull_operand = g.TempRegister();
    Emit(kArm64Umull, smull_operand, g.UseRegister(left->InputAt(0)),
         g.UseRegister(left->InputAt(1)));
    Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand,
         g.TempImmediate(32 + shift));
    return;
  }

  VisitRRO(this, kArm64Lsr32, node, kShift32Imm);
}
870
871
// Selects the instruction for a 64-bit logical right shift, folding
// Shr(And(x, mask), imm) into a single Ubfx when the mask selects a
// contiguous run of bits starting at the shift amount.
void InstructionSelector::VisitWord64Shr(Node* node) {
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x3f;
    Int64BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      // Clear the low lsb bits of the mask; they are shifted out anyway.
      uint64_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation64(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros64(mask);
      // Holds exactly when the remaining set bits are one contiguous run
      // starting at bit lsb.
      if ((mask_msb + mask_width + lsb) == 64) {
        Arm64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros64(mask));
        Emit(kArm64Ubfx, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()),
             g.UseImmediateOrTemp(m.right().node(), lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  }
  VisitRRO(this, kArm64Lsr, node, kShift64Imm);
}
896
897
// Selects the instruction for a 32-bit arithmetic right shift. Tries, in
// order: the shared shift-shift bitfield extract (Sbfx), fusing an
// Int32MulHigh operand into Smull+Asr, fusing Sar(Add(a, MulHigh(x, y)), K)
// into Smull + add-with-shift + Asr, and finally a plain Asr.
void InstructionSelector::VisitWord32Sar(Node* node) {
  if (TryEmitBitfieldExtract32(this, node)) {
    return;
  }

  Int32BinopMatcher m(node);
  if (m.left().IsInt32MulHigh() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    // Combine this shift with the multiply and shift that would be generated
    // by Int32MulHigh.
    Arm64OperandGenerator g(this);
    Node* left = m.left().node();
    int shift = m.right().Value() & 0x1f;
    InstructionOperand const smull_operand = g.TempRegister();
    Emit(kArm64Smull, smull_operand, g.UseRegister(left->InputAt(0)),
         g.UseRegister(left->InputAt(1)));
    Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand,
         g.TempImmediate(32 + shift));
    return;
  }

  if (m.left().IsInt32Add() && m.right().HasValue() &&
      CanCover(node, node->InputAt(0))) {
    Node* add_node = m.left().node();
    Int32BinopMatcher madd_node(add_node);
    if (madd_node.left().IsInt32MulHigh() &&
        CanCover(add_node, madd_node.left().node())) {
      // Combine the shift that would be generated by Int32MulHigh with the add
      // on the left of this Sar operation. We do it here, as the result of the
      // add potentially has 33 bits, so we have to ensure the result is
      // truncated by being the input to this 32-bit Sar operation.
      Arm64OperandGenerator g(this);
      Node* mul_node = madd_node.left().node();

      InstructionOperand const smull_operand = g.TempRegister();
      Emit(kArm64Smull, smull_operand, g.UseRegister(mul_node->InputAt(0)),
           g.UseRegister(mul_node->InputAt(1)));

      // Add with the 64-bit product arithmetic-shifted right by 32, i.e. the
      // high word that Int32MulHigh would have produced.
      InstructionOperand const add_operand = g.TempRegister();
      Emit(kArm64Add | AddressingModeField::encode(kMode_Operand2_R_ASR_I),
           add_operand, g.UseRegister(add_node->InputAt(1)), smull_operand,
           g.TempImmediate(32));

      Emit(kArm64Asr32, g.DefineAsRegister(node), add_operand,
           g.UseImmediate(node->InputAt(1)));
      return;
    }
  }

  VisitRRO(this, kArm64Asr32, node, kShift32Imm);
}
949
950
// 64-bit arithmetic right shift; no fusion patterns are handled here.
void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitRRO(this, kArm64Asr, node, kShift64Imm);
}


// 32-bit rotate right.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kArm64Ror32, node, kShift32Imm);
}


// 64-bit rotate right.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kArm64Ror, node, kShift64Imm);
}
964
965
// Count leading zeros, 64-bit.
void InstructionSelector::VisitWord64Clz(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Clz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


// Count leading zeros, 32-bit.
void InstructionSelector::VisitWord32Clz(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Clz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


// Count-trailing-zeros nodes are never presented to this selector.
void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }


// Bit reversal maps directly onto Rbit.
void InstructionSelector::VisitWord32ReverseBits(Node* node) {
  VisitRR(this, kArm64Rbit32, node);
}


void InstructionSelector::VisitWord64ReverseBits(Node* node) {
  VisitRR(this, kArm64Rbit, node);
}


// Population-count nodes are never presented to this selector.
void InstructionSelector::VisitWord32Popcnt(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64Popcnt(Node* node) { UNREACHABLE(); }
998
999
// Selects the instruction for a 32-bit integer addition, fusing a covered
// multiply on either side into Madd unless that multiply would itself be
// strength-reduced to an add-with-shift.
void InstructionSelector::VisitInt32Add(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  // Select Madd(x, y, z) for Add(Mul(x, y), z).
  if (m.left().IsInt32Mul() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mleft) == 0) {
      Emit(kArm64Madd32, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }
  // Select Madd(x, y, z) for Add(z, Mul(x, y)).
  if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Madd32, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }
  // No fusable multiply: fall back to the generic add/sub selection.
  VisitAddSub<Int32BinopMatcher>(this, node, kArm64Add32, kArm64Sub32);
}
1029
1030
// Selects the instruction for a 64-bit integer addition; mirrors
// VisitInt32Add, fusing a covered multiply into Madd.
void InstructionSelector::VisitInt64Add(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  // Select Madd(x, y, z) for Add(Mul(x, y), z).
  if (m.left().IsInt64Mul() && CanCover(node, m.left().node())) {
    Int64BinopMatcher mleft(m.left().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mleft) == 0) {
      Emit(kArm64Madd, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }
  // Select Madd(x, y, z) for Add(z, Mul(x, y)).
  if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Madd, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }
  // No fusable multiply: fall back to the generic add/sub selection.
  VisitAddSub<Int64BinopMatcher>(this, node, kArm64Add, kArm64Sub);
}
1060
1061
// Selects the instruction for a 32-bit integer subtraction, fusing a covered
// multiply on the right into Msub unless that multiply would itself be
// strength-reduced to an add-with-shift.
void InstructionSelector::VisitInt32Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);

  // Select Msub(x, y, a) for Sub(a, Mul(x, y)).
  if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Msub32, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }

  VisitAddSub<Int32BinopMatcher>(this, node, kArm64Sub32, kArm64Add32);
}
1081
1082
// Selects the instruction for a 64-bit integer subtraction; mirrors
// VisitInt32Sub, fusing a covered multiply on the right into Msub.
void InstructionSelector::VisitInt64Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);

  // Select Msub(x, y, a) for Sub(a, Mul(x, y)).
  if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());
    // Check multiply can't be later reduced to addition with shift.
    if (LeftShiftForReducedMultiply(&mright) == 0) {
      Emit(kArm64Msub, g.DefineAsRegister(node),
           g.UseRegister(mright.left().node()),
           g.UseRegister(mright.right().node()),
           g.UseRegister(m.left().node()));
      return;
    }
  }

  VisitAddSub<Int64BinopMatcher>(this, node, kArm64Sub, kArm64Add);
}
1102
1103
// Selects the instruction for a 32-bit integer multiplication. Tries, in
// order: strength reduction of x * (2^k + 1) to add-with-shift, fusing a
// covered Sub(0, v) operand into Mneg, and finally a plain Mul.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Arm64OperandGenerator g(this);
  Int32BinopMatcher m(node);

  // First, try to reduce the multiplication to addition with left shift.
  // x * (2^k + 1) -> x + (x << k)
  int32_t shift = LeftShiftForReducedMultiply(&m);
  if (shift > 0) {
    Emit(kArm64Add32 | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.UseRegister(m.left().node()), g.TempImmediate(shift));
    return;
  }

  if (m.left().IsInt32Sub() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());

    // Select Mneg(x, y) for Mul(Sub(0, x), y).
    if (mleft.left().Is(0)) {
      Emit(kArm64Mneg32, g.DefineAsRegister(node),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }

  if (m.right().IsInt32Sub() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());

    // Select Mneg(x, y) for Mul(x, Sub(0, y)).
    if (mright.left().Is(0)) {
      Emit(kArm64Mneg32, g.DefineAsRegister(node),
           g.UseRegister(m.left().node()),
           g.UseRegister(mright.right().node()));
      return;
    }
  }

  VisitRRR(this, kArm64Mul32, node);
}
1144
1145
// Selects the instruction for a 64-bit integer multiplication; mirrors
// VisitInt32Mul (add-with-shift reduction, then Mneg fusion, then Mul).
void InstructionSelector::VisitInt64Mul(Node* node) {
  Arm64OperandGenerator g(this);
  Int64BinopMatcher m(node);

  // First, try to reduce the multiplication to addition with left shift.
  // x * (2^k + 1) -> x + (x << k)
  int32_t shift = LeftShiftForReducedMultiply(&m);
  if (shift > 0) {
    Emit(kArm64Add | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.UseRegister(m.left().node()), g.TempImmediate(shift));
    return;
  }

  if (m.left().IsInt64Sub() && CanCover(node, m.left().node())) {
    Int64BinopMatcher mleft(m.left().node());

    // Select Mneg(x, y) for Mul(Sub(0, x), y).
    if (mleft.left().Is(0)) {
      Emit(kArm64Mneg, g.DefineAsRegister(node),
           g.UseRegister(mleft.right().node()),
           g.UseRegister(m.right().node()));
      return;
    }
  }

  if (m.right().IsInt64Sub() && CanCover(node, m.right().node())) {
    Int64BinopMatcher mright(m.right().node());

    // Select Mneg(x, y) for Mul(x, Sub(0, y)).
    if (mright.left().Is(0)) {
      Emit(kArm64Mneg, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(mright.right().node()));
      return;
    }
  }

  VisitRRR(this, kArm64Mul, node);
}
1185
1186
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001187void InstructionSelector::VisitInt32MulHigh(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001188 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001189 InstructionOperand const smull_operand = g.TempRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001190 Emit(kArm64Smull, smull_operand, g.UseRegister(node->InputAt(0)),
1191 g.UseRegister(node->InputAt(1)));
1192 Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1193}
1194
1195
1196void InstructionSelector::VisitUint32MulHigh(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001197 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001198 InstructionOperand const smull_operand = g.TempRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001199 Emit(kArm64Umull, smull_operand, g.UseRegister(node->InputAt(0)),
1200 g.UseRegister(node->InputAt(1)));
1201 Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1202}
1203
1204
// Integer division and modulus: each maps directly onto a single
// three-register pseudo-instruction (signed/unsigned, 32/64-bit).
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitRRR(this, kArm64Idiv32, node);
}


void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kArm64Idiv, node);
}


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitRRR(this, kArm64Udiv32, node);
}


void InstructionSelector::VisitUint64Div(Node* node) {
  VisitRRR(this, kArm64Udiv, node);
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kArm64Imod32, node);
}


void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kArm64Imod, node);
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitRRR(this, kArm64Umod32, node);
}


void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitRRR(this, kArm64Umod, node);
}
1243
1244
// Scalar numeric conversions: each maps onto a single two-register
// conversion instruction.
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kArm64Float32ToFloat64, node);
}


void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kArm64Int32ToFloat32, node);
}


void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kArm64Uint32ToFloat32, node);
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kArm64Int32ToFloat64, node);
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kArm64Uint32ToFloat64, node);
}


void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kArm64Float32ToInt32, node);
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ToInt32, node);
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kArm64Float32ToUint32, node);
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kArm64Float64ToUint32, node);
}

// Same lowering as ChangeFloat64ToUint32 above.
void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kArm64Float64ToUint32, node);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001292
1293void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001294 Arm64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001295
1296 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1297 InstructionOperand outputs[2];
1298 size_t output_count = 0;
1299 outputs[output_count++] = g.DefineAsRegister(node);
1300
1301 Node* success_output = NodeProperties::FindProjection(node, 1);
1302 if (success_output) {
1303 outputs[output_count++] = g.DefineAsRegister(success_output);
1304 }
1305
1306 Emit(kArm64Float32ToInt64, output_count, outputs, 1, inputs);
1307}
1308
1309
1310void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1311 Arm64OperandGenerator g(this);
1312
1313 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1314 InstructionOperand outputs[2];
1315 size_t output_count = 0;
1316 outputs[output_count++] = g.DefineAsRegister(node);
1317
1318 Node* success_output = NodeProperties::FindProjection(node, 1);
1319 if (success_output) {
1320 outputs[output_count++] = g.DefineAsRegister(success_output);
1321 }
1322
1323 Emit(kArm64Float64ToInt64, output_count, outputs, 1, inputs);
1324}
1325
1326
1327void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1328 Arm64OperandGenerator g(this);
1329
1330 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1331 InstructionOperand outputs[2];
1332 size_t output_count = 0;
1333 outputs[output_count++] = g.DefineAsRegister(node);
1334
1335 Node* success_output = NodeProperties::FindProjection(node, 1);
1336 if (success_output) {
1337 outputs[output_count++] = g.DefineAsRegister(success_output);
1338 }
1339
1340 Emit(kArm64Float32ToUint64, output_count, outputs, 1, inputs);
1341}
1342
1343
1344void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1345 Arm64OperandGenerator g(this);
1346
1347 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1348 InstructionOperand outputs[2];
1349 size_t output_count = 0;
1350 outputs[output_count++] = g.DefineAsRegister(node);
1351
1352 Node* success_output = NodeProperties::FindProjection(node, 1);
1353 if (success_output) {
1354 outputs[output_count++] = g.DefineAsRegister(success_output);
1355 }
1356
1357 Emit(kArm64Float64ToUint64, output_count, outputs, 1, inputs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001358}
1359
1360
// Sign-extension from 32 to 64 bits via Sxtw.
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  VisitRR(this, kArm64Sxtw, node);
}
1364
1365
// Zero-extension from 32 to 64 bits. If the input is one of the listed
// 32-bit operations, no code is needed (see comment below); otherwise a
// 32-bit move performs the extension.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  Arm64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32AddWithOverflow:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32SubWithOverflow:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh: {
      // 32-bit operations will write their result in a W register (implicitly
      // clearing the top 32-bit of the corresponding X register) so the
      // zero-extension is a no-op.
      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      return;
    }
    default:
      break;
  }
  // Input not known to already be zero-extended: emit an explicit 32-bit move.
  Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(value));
}
1404
1405
// Float64 -> float32 narrowing conversion.
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kArm64Float64ToFloat32, node);
}


// Float64 -> int32 truncation; the opcode depends on the truncation
// semantics requested by the operator.
void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
  switch (TruncationModeOf(node->op())) {
    case TruncationMode::kJavaScript:
      // JavaScript ToInt32 semantics.
      return VisitRR(this, kArchTruncateDoubleToI, node);
    case TruncationMode::kRoundToZero:
      return VisitRR(this, kArm64Float64ToInt32, node);
  }
  UNREACHABLE();
}
1420
1421
// Truncation from int64 to int32. Only the low 32 bits of the result are
// observable, so a covered Sar(x, 32) or Shr(x, 32..63) can be lowered to a
// single Lsr: the arithmetic/logical difference only affects the discarded
// upper bits. Otherwise a 32-bit register move performs the truncation.
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  Arm64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  // Binop matching below requires the value node to have two inputs.
  if (CanCover(node, value) && value->InputCount() >= 2) {
    Int64BinopMatcher m(value);
    if ((m.IsWord64Sar() && m.right().HasValue() &&
         (m.right().Value() == 32)) ||
        (m.IsWord64Shr() && m.right().IsInRange(32, 63))) {
      Emit(kArm64Lsr, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseImmediate(m.right().node()));
      return;
    }
  }

  Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
1438
1439
// 64-bit integer -> float conversions: single-instruction lowerings.
void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kArm64Int64ToFloat32, node);
}


void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kArm64Int64ToFloat64, node);
}


void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kArm64Uint64ToFloat32, node);
}


void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kArm64Uint64ToFloat64, node);
}


// Bitcasts: reinterpret register contents between FP and integer registers.
// The 32-bit float->int case is implemented with the float64 low-word
// extract opcode.
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kArm64Float64ExtractLowWord32, node);
}


void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kArm64U64MoveFloat64, node);
}


void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kArm64Float64MoveU64, node);
}


void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kArm64Float64MoveU64, node);
}


// Floating-point addition and float32 subtraction: plain three-register ops.
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kArm64Float32Add, node);
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitRRR(this, kArm64Float64Add, node);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kArm64Float32Sub, node);
}
1493
1494
// Selects the instruction for a float64 subtraction. Recognizes two
// patterns built around a -0.0 left operand:
//   Sub(-0.0, RoundDown(Sub(-0.0, x))) -> Float64RoundUp(x)
//     (ceil expressed as the negated floor of the negation), and
//   Sub(-0.0, x) -> Float64Neg(x).
// Anything else becomes a plain three-register Float64Sub.
void InstructionSelector::VisitFloat64Sub(Node* node) {
  Arm64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          // -0.0 - floor(-0.0 - x) == ceil(x).
          Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
               g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    // -0.0 - x is a negation.
    Emit(kArm64Float64Neg, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  VisitRRR(this, kArm64Float64Sub, node);
}
1517
1518
// Floating-point multiply/divide: plain three-register instructions.
void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kArm64Float32Mul, node);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kArm64Float64Mul, node);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kArm64Float32Div, node);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kArm64Float64Div, node);
}
1537
1538
// Float64 modulus has no single ARM64 instruction: it is emitted as a call
// (MarkAsCall) with the operands pinned to FP argument registers d0/d1 and
// the result in d0 — presumably dispatching to the runtime's fmod; see the
// code generator for kArm64Float64Mod to confirm.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  Arm64OperandGenerator g(this);
  Emit(kArm64Float64Mod, g.DefineAsFixed(node, d0),
       g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
}
1545
1546
// Unary/binary FP min/max, abs, sqrt, and the rounding modes each map 1:1
// onto a single ARM64 opcode via the VisitRR/VisitRRR helpers.
void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRR(this, kArm64Float32Max, node);
}


void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRR(this, kArm64Float64Max, node);
}


void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRR(this, kArm64Float32Min, node);
}


void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRR(this, kArm64Float64Min, node);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kArm64Float32Abs, node);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kArm64Float64Abs, node);
}


void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kArm64Float32Sqrt, node);
}


void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kArm64Float64Sqrt, node);
}


// Round toward -infinity (floor).
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kArm64Float32RoundDown, node);
}


void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kArm64Float64RoundDown, node);
}


// Round toward +infinity (ceil).
void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kArm64Float32RoundUp, node);
}


void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kArm64Float64RoundUp, node);
}


// Round toward zero (truncate).
void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kArm64Float32RoundTruncate, node);
}


void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kArm64Float64RoundTruncate, node);
}


// Round to nearest, ties away from zero.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kArm64Float64RoundTiesAway, node);
}


// Round to nearest, ties to even.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, kArm64Float32RoundTiesEven, node);
}


void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  VisitRR(this, kArm64Float64RoundTiesEven, node);
}
1630
1631
// Reserves stack space for the outgoing call arguments and stores ("pokes")
// each argument into its slot. ARM64 here maintains two stack pointers
// (CSP for native frames, JSSP for JS frames); the claim/poke opcodes are
// chosen by the callee's descriptor.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  Arm64OperandGenerator g(this);

  // Which stack pointer the caller is currently using, and which one the
  // callee expects.
  bool from_native_stack = linkage()->GetIncomingDescriptor()->UseNativeStack();
  bool to_native_stack = descriptor->UseNativeStack();

  // A claim must be emitted even for zero arguments when crossing between
  // stack-pointer domains, so the pointers get synchronized.
  bool always_claim = to_native_stack != from_native_stack;

  int claim_count = static_cast<int>(arguments->size());
  int slot = claim_count - 1;
  // Bump the stack pointer(s).
  if (claim_count > 0 || always_claim) {
    // TODO(titzer): claim and poke probably take small immediates.
    // TODO(titzer): it would be better to bump the csp here only
    // and emit paired stores with increment for non c frames.
    ArchOpcode claim = to_native_stack ? kArm64ClaimCSP : kArm64ClaimJSSP;
    // Claim(0) isn't a nop if there is a mismatch between CSP and JSSP.
    Emit(claim, g.NoOutput(), g.TempImmediate(claim_count));
  }

  // Poke the arguments into the stack.
  ArchOpcode poke = to_native_stack ? kArm64PokeCSP : kArm64PokeJSSP;
  while (slot >= 0) {
    Emit(poke, g.NoOutput(), g.UseRegister((*arguments)[slot].node()),
         g.TempImmediate(slot));
    slot--;
    // TODO(ahaas): Poke arguments in pairs if two subsequent arguments have the
    // same type.
    // Emit(kArm64PokePair, g.NoOutput(), g.UseRegister((*arguments)[slot]),
    //      g.UseRegister((*arguments)[slot - 1]), g.TempImmediate(slot));
    // slot -= 2;
  }
}
1667
1668
// Tail-call target addresses are not encodable as immediates on ARM64.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

// Number of temporary registers reserved for a JS-function tail call.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001672
1673namespace {
1674
// Shared routine for multiple compare operations.
// Emits the flag-setting instruction and consumes the continuation in one of
// three ways: a two-way branch, a conditional deoptimization, or a boolean
// materialized into a register.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  // Fold the continuation's condition/mode into the instruction code.
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}
1692
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001693
1694// Shared routine for multiple word compare operations.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695void VisitWordCompare(InstructionSelector* selector, Node* node,
1696 InstructionCode opcode, FlagsContinuation* cont,
1697 bool commutative, ImmediateMode immediate_mode) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001698 Arm64OperandGenerator g(selector);
1699 Node* left = node->InputAt(0);
1700 Node* right = node->InputAt(1);
1701
1702 // Match immediates on left or right side of comparison.
1703 if (g.CanBeImmediate(right, immediate_mode)) {
1704 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1705 cont);
1706 } else if (g.CanBeImmediate(left, immediate_mode)) {
1707 if (!commutative) cont->Commute();
1708 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1709 cont);
1710 } else {
1711 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1712 cont);
1713 }
1714}
1715
1716
// 32-bit word comparison. Rewrites `x cmp (0 - y)` into `x cmn y` so the
// negation node disappears; otherwise emits a plain cmp.
void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  Int32BinopMatcher m(node);
  ArchOpcode opcode = kArm64Cmp32;

  // Select negated compare for comparisons with negated right input.
  if (m.right().IsInt32Sub()) {
    Node* sub = m.right().node();
    Int32BinopMatcher msub(sub);
    if (msub.left().Is(0)) {
      bool can_cover = selector->CanCover(node, sub);
      node->ReplaceInput(1, msub.right().node());
      // Even if the comparison node covers the subtraction, after the input
      // replacement above, the node still won't cover the input to the
      // subtraction; the subtraction still uses it.
      // In order to get shifted operations to work, we must remove the rhs
      // input to the subtraction, as TryMatchAnyShift requires this node to
      // cover the input shift. We do this by setting it to the lhs input,
      // as we know it's zero, and the result of the subtraction isn't used by
      // any other node.
      if (can_cover) sub->ReplaceInput(1, msub.left().node());
      opcode = kArm64Cmn32;
    }
  }
  VisitBinop<Int32BinopMatcher>(selector, node, opcode, kArithmeticImm, cont);
}
1743
1744
// Shared routine: test a value against itself (tst reg, reg) so the flags
// reflect whether it is zero.
void VisitWordTest(InstructionSelector* selector, Node* node,
                   InstructionCode opcode, FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.UseRegister(node), g.UseRegister(node),
               cont);
}


void VisitWord32Test(InstructionSelector* selector, Node* node,
                     FlagsContinuation* cont) {
  VisitWordTest(selector, node, kArm64Tst32, cont);
}


void VisitWord64Test(InstructionSelector* selector, Node* node,
                     FlagsContinuation* cont) {
  VisitWordTest(selector, node, kArm64Tst, cont);
}
1763
1764
// Shared routine for multiple float32 compare operations.
// A comparison against 0.0f uses the immediate-zero form of the compare;
// when zero is on the left the operands are swapped and the continuation's
// condition commuted to match.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  Float32BinopMatcher m(node);
  if (m.right().Is(0.0f)) {
    VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
                 g.UseImmediate(m.right().node()), cont);
  } else if (m.left().Is(0.0f)) {
    cont->Commute();
    VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.right().node()),
                 g.UseImmediate(m.left().node()), cont);
  } else {
    VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
                 g.UseRegister(m.right().node()), cont);
  }
}
1782
1783
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001784// Shared routine for multiple float64 compare operations.
1785void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1786 FlagsContinuation* cont) {
1787 Arm64OperandGenerator g(selector);
1788 Float64BinopMatcher m(node);
1789 if (m.right().Is(0.0)) {
1790 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
1791 g.UseImmediate(m.right().node()), cont);
1792 } else if (m.left().Is(0.0)) {
1793 cont->Commute();
1794 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.right().node()),
1795 g.UseImmediate(m.left().node()), cont);
1796 } else {
1797 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
1798 g.UseRegister(m.right().node()), cont);
1799 }
1800}
1801
// Shared routine for branching/deoptimizing on (value != 0). Walks up the
// value's producer chain while the continuation can "cover" it (this is its
// only use) and fuses the flag-setting comparison directly into the branch.
// Falls back to an explicit compare-and-branch against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  Arm64OperandGenerator g(selector);
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // (x == 0) inverts the continuation and retries on x, so chains of
        // equality-with-zero collapse into a single test.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                kArithmeticImm);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kArm64Add32, kArithmeticImm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kArm64Sub32, kArithmeticImm, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Add,
                                                     kArithmeticImm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Sub,
                                                     kArithmeticImm, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Add:
        // (x + y) != 0 can be tested with cmn (compare-negative).
        return VisitWordCompare(selector, value, kArm64Cmn32, cont, true,
                                kArithmeticImm);
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And: {
        Int32BinopMatcher m(value);
        if (cont->IsBranch() && m.right().HasValue() &&
            (base::bits::CountPopulation32(m.right().Value()) == 1)) {
          // If the mask has only one bit set, we can use tbz/tbnz.
          DCHECK((cont->condition() == kEqual) ||
                 (cont->condition() == kNotEqual));
          selector->Emit(
              cont->Encode(kArm64TestAndBranch32), g.NoOutput(),
              g.UseRegister(m.left().node()),
              g.TempImmediate(
                  base::bits::CountTrailingZeros32(m.right().Value())),
              g.Label(cont->true_block()), g.Label(cont->false_block()));
          return;
        }
        return VisitWordCompare(selector, value, kArm64Tst32, cont, true,
                                kLogical32Imm);
      }
      case IrOpcode::kWord64And: {
        Int64BinopMatcher m(value);
        if (cont->IsBranch() && m.right().HasValue() &&
            (base::bits::CountPopulation64(m.right().Value()) == 1)) {
          // If the mask has only one bit set, we can use tbz/tbnz.
          DCHECK((cont->condition() == kEqual) ||
                 (cont->condition() == kNotEqual));
          selector->Emit(
              cont->Encode(kArm64TestAndBranch), g.NoOutput(),
              g.UseRegister(m.left().node()),
              g.TempImmediate(
                  base::bits::CountTrailingZeros64(m.right().Value())),
              g.Label(cont->true_block()), g.Label(cont->false_block()));
          return;
        }
        return VisitWordCompare(selector, value, kArm64Tst, cont, true,
                                kLogical64Imm);
      }
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, compare against 0 and branch.
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kArm64CompareAndBranch32), g.NoOutput(),
                   g.UseRegister(value), g.Label(cont->true_block()),
                   g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsDeoptimize());
    selector->EmitDeoptimize(cont->Encode(kArm64Tst32), g.NoOutput(),
                             g.UseRegister(value), g.UseRegister(value),
                             cont->frame_state());
  }
}
1962
Ben Murdochda12d292016-06-02 14:46:10 +01001963} // namespace
1964
// Branch on (condition != 0); the comparison is fused where possible.
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}

// Deoptimize when the condition is non-zero; input 1 is the frame state.
void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

// Deoptimize when the condition is zero; input 1 is the frame state.
void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001982
// Lowers a switch to either a jump table (ArchTableSwitch) or a chain of
// conditional jumps (ArchLookupSwitch), picking whichever is cheaper under
// a simple space/time cost model that weights time 3x.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  Arm64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    // Rebase the switch value so the table index starts at zero.
    if (sw.min_value) {
      index_operand = g.TempRegister();
      Emit(kArm64Sub32, index_operand, value_operand,
           g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
2009
2010
// Word32 equality producing a boolean. For (x == 0), tries to fuse with the
// node producing x (add -> cmn, sub -> cmp, and -> tst, nested equality ->
// negated compare); otherwise emits a plain 32-bit compare.
void InstructionSelector::VisitWord32Equal(Node* const node) {
  Node* const user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt32Add:
          return VisitWordCompare(this, value, kArm64Cmn32, &cont, true,
                                  kArithmeticImm);
        case IrOpcode::kInt32Sub:
          return VisitWordCompare(this, value, kArm64Cmp32, &cont, false,
                                  kArithmeticImm);
        case IrOpcode::kWord32And:
          return VisitWordCompare(this, value, kArm64Tst32, &cont, true,
                                  kLogical32Imm);
        case IrOpcode::kWord32Equal: {
          // Word32Equal(Word32Equal(x, y), 0) => Word32Compare(x, y, ne).
          Int32BinopMatcher mequal(value);
          node->ReplaceInput(0, mequal.left().node());
          node->ReplaceInput(1, mequal.right().node());
          cont.Negate();
          return VisitWord32Compare(this, node, &cont);
        }
        default:
          break;
      }
      // No fusible producer: test the value against itself.
      return VisitWord32Test(this, value, &cont);
    }
  }
  VisitWord32Compare(this, node, &cont);
}
2044
2045
// 32-bit relational comparisons producing a boolean: set flags with a
// 32-bit compare and materialize the condition.
void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}
2070
2071
// Word64 equality producing a boolean. For (x == 0), an And producer is
// fused into a tst; otherwise the value is tested against itself. The
// general case is a 64-bit compare.
void InstructionSelector::VisitWord64Equal(Node* const node) {
  Node* const user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kWord64And:
          return VisitWordCompare(this, value, kArm64Tst, &cont, true,
                                  kLogical64Imm);
        default:
          break;
      }
      return VisitWord64Test(this, value, &cont);
    }
  }
  VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
}
2091
2092
2093void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002094 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002095 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002096 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
2097 kArithmeticImm, &cont);
2098 }
2099 FlagsContinuation cont;
2100 VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, &cont);
2101}
2102
2103
2104void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002105 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002106 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002107 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
2108 kArithmeticImm, &cont);
2109 }
2110 FlagsContinuation cont;
2111 VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, &cont);
2112}
2113
2114
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002115void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
2116 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002117 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002118 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm,
2119 &cont);
2120 }
2121 FlagsContinuation cont;
2122 VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm, &cont);
2123}
2124
2125
2126void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
2127 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01002128 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002129 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm,
2130 &cont);
2131 }
2132 FlagsContinuation cont;
2133 VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm, &cont);
2134}
2135
2136
// 64-bit relational comparisons producing a boolean: set flags with a
// 64-bit compare and materialize the condition.
void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
}


void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
}
2161
2162
// Floating-point comparisons producing a boolean: set flags via the shared
// FP compare routines and materialize the condition into a register.
void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
2199
2200
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002201void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
2202 Arm64OperandGenerator g(this);
2203 Emit(kArm64Float64ExtractLowWord32, g.DefineAsRegister(node),
2204 g.UseRegister(node->InputAt(0)));
2205}
2206
2207
2208void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
2209 Arm64OperandGenerator g(this);
2210 Emit(kArm64Float64ExtractHighWord32, g.DefineAsRegister(node),
2211 g.UseRegister(node->InputAt(0)));
2212}
2213
2214
2215void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
2216 Arm64OperandGenerator g(this);
2217 Node* left = node->InputAt(0);
2218 Node* right = node->InputAt(1);
2219 if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
2220 CanCover(node, left)) {
2221 Node* right_of_left = left->InputAt(1);
2222 Emit(kArm64Bfi, g.DefineSameAsFirst(right), g.UseRegister(right),
2223 g.UseRegister(right_of_left), g.TempImmediate(32),
2224 g.TempImmediate(32));
2225 Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(right));
2226 return;
2227 }
2228 Emit(kArm64Float64InsertLowWord32, g.DefineAsRegister(node),
2229 g.UseRegister(left), g.UseRegister(right));
2230}
2231
2232
2233void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
2234 Arm64OperandGenerator g(this);
2235 Node* left = node->InputAt(0);
2236 Node* right = node->InputAt(1);
2237 if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
2238 CanCover(node, left)) {
2239 Node* right_of_left = left->InputAt(1);
2240 Emit(kArm64Bfi, g.DefineSameAsFirst(left), g.UseRegister(right_of_left),
2241 g.UseRegister(right), g.TempImmediate(32), g.TempImmediate(32));
2242 Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(left));
2243 return;
2244 }
2245 Emit(kArm64Float64InsertHighWord32, g.DefineAsRegister(node),
2246 g.UseRegister(left), g.UseRegister(right));
2247}
2248
2249
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002250// static
2251MachineOperatorBuilder::Flags
2252InstructionSelector::SupportedMachineOperatorFlags() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002253 return MachineOperatorBuilder::kFloat32Max |
2254 MachineOperatorBuilder::kFloat32Min |
2255 MachineOperatorBuilder::kFloat32RoundDown |
2256 MachineOperatorBuilder::kFloat64Max |
2257 MachineOperatorBuilder::kFloat64Min |
2258 MachineOperatorBuilder::kFloat64RoundDown |
2259 MachineOperatorBuilder::kFloat32RoundUp |
2260 MachineOperatorBuilder::kFloat64RoundUp |
2261 MachineOperatorBuilder::kFloat32RoundTruncate |
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002262 MachineOperatorBuilder::kFloat64RoundTruncate |
2263 MachineOperatorBuilder::kFloat64RoundTiesAway |
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002264 MachineOperatorBuilder::kFloat32RoundTiesEven |
2265 MachineOperatorBuilder::kFloat64RoundTiesEven |
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002266 MachineOperatorBuilder::kWord32ShiftIsSafe |
2267 MachineOperatorBuilder::kInt32DivIsSafe |
Ben Murdoch097c5b22016-05-18 11:27:45 +01002268 MachineOperatorBuilder::kUint32DivIsSafe |
2269 MachineOperatorBuilder::kWord32ReverseBits |
2270 MachineOperatorBuilder::kWord64ReverseBits;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002271}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002272
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002273} // namespace compiler
2274} // namespace internal
2275} // namespace v8