blob: 93d74ee1a20d896075acdbf53f407fec16b51aaa [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "mirror/array.h"
21#include "mirror/art_method.h"
22#include "mirror/object_reference.h"
23#include "thread.h"
24#include "utils/assembler.h"
25#include "utils/x86_64/assembler_x86_64.h"
26#include "utils/x86_64/managed_register_x86_64.h"
27
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028namespace art {
29
// Convenience accessor: returns the x86_64 view of this register Location.
x86_64::X86_64ManagedRegister Location::AsX86_64() const {
  return reg().AsX86_64();
}
33
34namespace x86_64 {
35
// Inside slow paths, code is emitted through the codegen passed to EmitNativeCode.
#define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->

// Slow path that throws a NullPointerException via the quick runtime
// entrypoint. Entered from a null check; does not return to compiled code.
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Call pThrowNullPointer through the thread register (gs-relative absolute address).
    __ gs()->call(
        Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowNullPointer), true));
    // Record the dex PC of the faulting instruction so the runtime can map
    // the native PC back to dex for the exception.
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;  // Dex PC of the instruction owning this slow path.
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

#undef __
// Outside slow paths, code is emitted through the visitor's own assembler.
#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
55
// Maps an HIR if-condition to the corresponding x86-64 condition code.
inline Condition X86_64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return kEqual;  // Unreachable: LOG(FATAL) aborts. Silences missing-return warnings.
}
69
// Some x86_64 instructions require a register to be available as temp.
static constexpr Register TMP = R11;

// Only the fake return-address register is counted as pushed at entry
// (see GenerateFrameEntry: the return PC is pushed by the call itself).
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// The current ART method is spilled at the bottom of the frame (offset 0 from RSP).
static constexpr int kCurrentMethodStackOffset = 0;
75
// Debug helper: prints the managed-register name of a core register.
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}

// Debug helper: prints the managed-register name of an XMM register.
void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}

// Builds a register Location from a raw CPU register.
static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}
87
// The code generator owns the location builder, the instruction visitor and
// the parallel-move resolver used for this graph.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
98
// Allocates a free CPU register for a value of the given type. All integral
// and reference types share the CPU register file (a long fits in one 64-bit
// register); floating point is not implemented yet.
ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
                                                          bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  // Unreachable: all enumerators return or abort above.
  return ManagedRegister::NoRegister();
}
123
124void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
125 // Stack register is always reserved.
126 blocked_registers[RSP] = true;
127
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000128 // Block the register used as TMP.
129 blocked_registers[TMP] = true;
130
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100131 // TODO: We currently don't use Quick's callee saved registers.
132 blocked_registers[RBX] = true;
133 blocked_registers[RBP] = true;
134 blocked_registers[R12] = true;
135 blocked_registers[R13] = true;
136 blocked_registers[R14] = true;
137 blocked_registers[R15] = true;
138}
139
140void CodeGeneratorX86_64::ComputeFrameSize(size_t number_of_spill_slots) {
141 // Add the current ART method to the frame size, the return PC, and the filler.
142 SetFrameSize(RoundUp(
143 number_of_spill_slots * kVRegSize
144 + kVRegSize // filler
145 + kVRegSize // Art method
146 + kNumberOfPushedRegistersAtEntry * kX86_64WordSize,
147 kStackAlignment));
148}
149
// Emits the method prologue: allocates the frame and spills the current method.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
  // Spill the current method (first argument, RDI) at the bottom of the frame.
  // NOTE(review): 32-bit store — presumably the ArtMethod* is a 32-bit heap
  // reference here (matches the movl in LoadCurrentMethod); confirm.
  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}

// Emits the frame teardown (the matching addq for the prologue's subq).
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}
164
// Binds a label to the current assembler position.
void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}

// Loads the current method from its frame slot (spilled by GenerateFrameEntry).
// NOTE(review): 32-bit load, matching the 32-bit spill in the prologue.
void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}
172
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100173Location CodeGeneratorX86_64::GetTemporaryLocation(HTemporary* temp) const {
174 uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
175 // Use the temporary region (right below the dex registers).
176 int32_t slot = GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86_64WordSize)
177 - kVRegSize // filler
178 - (number_of_vregs * kVRegSize)
179 - ((1 + temp->GetIndex()) * kVRegSize);
180 return Location::StackSlot(slot);
181}
182
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100183int32_t CodeGeneratorX86_64::GetStackSlot(HLocal* local) const {
184 uint16_t reg_number = local->GetRegNumber();
185 uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
186 uint16_t number_of_in_vregs = GetGraph()->GetNumberOfInVRegs();
187 if (reg_number >= number_of_vregs - number_of_in_vregs) {
188 // Local is a parameter of the method. It is stored in the caller's frame.
189 return GetFrameSize() + kVRegSize // ART method
190 + (reg_number - number_of_vregs + number_of_in_vregs) * kVRegSize;
191 } else {
192 // Local is a temporary in this method. It is stored in this method's frame.
193 return GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86_64WordSize)
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100194 - kVRegSize // filler
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100195 - (number_of_vregs * kVRegSize)
196 + (reg_number * kVRegSize);
197 }
198}
199
// Wraps a local's stack slot in a Location of the right width for the load's type.
Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
      // Longs occupy two vreg slots.
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << load->GetType();

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      // Sub-word types are widened to int before this point, so a load of
      // these types is unexpected here.
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}
225
// Moves a value between two arbitrary locations (register or stack slot).
// Register-to-register moves are always 64-bit; single stack slots use 32-bit
// moves and double stack slots 64-bit moves. Stack-to-stack moves go through
// the scratch register TMP.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;  // No-op move.
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (source.IsStackSlot()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack: bounce through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // 64-bit stack-to-stack: bounce through TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
258
// Materializes the value produced by `instruction` into `location`.
// Constants are emitted inline; load-locals are read from their stack slot;
// any other instruction's value is taken from its output location.
void CodeGeneratorX86_64::Move(HInstruction* instruction,
                               Location location,
                               HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86_64().AsCpuRegister(), imm);
    } else {
      __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
    } else {
      // No 64-bit immediate store to memory on x86-64: go through TMP.
      __ movq(CpuRegister(TMP), Immediate(value));
      __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move(location,
             Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // The producing instruction must be adjacent to its use (or only
    // separated by temporaries) for its output location to still be valid.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        Move(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
313
314void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
315 got->SetLocations(nullptr);
316}
317
318void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
319 HBasicBlock* successor = got->GetSuccessor();
320 if (GetGraph()->GetExitBlock() == successor) {
321 codegen_->GenerateFrameExit();
322 } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
323 __ jmp(codegen_->GetLabelOf(successor));
324 }
325}
326
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);  // The exit block needs no operands.
}

void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  // The exit block should never be executed; trap in debug builds if it is.
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
337
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  // Only a materialized condition needs an input location here; otherwise the
  // compare is emitted directly on the condition's own inputs (see codegen).
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}
348
349void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
Dave Allison20dfc792014-06-16 20:44:29 -0700350 HInstruction* cond = if_instr->InputAt(0);
351 DCHECK(cond->IsCondition());
352 HCondition* condition = cond->AsCondition();
353 if (condition->NeedsMaterialization()) {
354 // Materialized condition, compare against 0.
355 Location lhs = if_instr->GetLocations()->InAt(0);
356 if (lhs.IsRegister()) {
357 __ cmpl(lhs.AsX86_64().AsCpuRegister(), Immediate(0));
358 } else {
359 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
360 }
361 __ j(kEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
362 } else {
363 Location lhs = condition->GetLocations()->InAt(0);
364 Location rhs = condition->GetLocations()->InAt(1);
365 __ cmpl(lhs.AsX86_64().AsCpuRegister(), rhs.AsX86_64().AsCpuRegister());
366 __ j(X86_64Condition(condition->GetCondition()),
367 codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
368 }
369 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
370 __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100371 }
372}
373
void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);  // HLocal itself emits no code.
}

void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
389
// A store-local is handled entirely through locations: input 1 (the value) is
// constrained to the local's stack slot, so the register allocator / move
// resolver performs the store. No code is emitted by the visitor itself.
void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
  // Intentionally empty: the move to the stack slot is done by the location
  // constraints set up above.
}
414
void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // An output register is only needed when the boolean result is materialized
  // (i.e. used by something other than an immediately-following HIf).
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* comp) {
  // Only emit code when the condition is materialized: compare the inputs and
  // set the output register to 0/1 with setcc. Otherwise the consuming HIf
  // emits the compare itself.
  if (comp->NeedsMaterialization()) {
    __ cmpq(comp->GetLocations()->InAt(0).AsX86_64().AsCpuRegister(),
            comp->GetLocations()->InAt(1).AsX86_64().AsCpuRegister());
    __ setcc(X86_64Condition(comp->GetCondition()),
             comp->GetLocations()->Out().AsX86_64().AsCpuRegister());
  }
}
433
// All concrete comparison visitors delegate to the generic VisitCondition
// handlers above; the condition kind is recovered via HCondition::GetCondition.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
481
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  compare->SetLocations(locations);
}

// Emits the three-way compare: out = -1, 0, or 1 for lhs <, ==, > rhs.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  Label greater, done;
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }

  // MOV does not modify EFLAGS, so the flags from cmpq are still valid for
  // the conditional jumps below.
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(0));
  __ j(kEqual, &done);
  __ j(kGreater, &greater);

  // Fallthrough: lhs < rhs.
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(-1));
  __ jmp(&done);

  __ Bind(&greater);
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));

  __ Bind(&done);
}
514
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100515void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
516 // TODO: Support constant locations.
517 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
518 locations->SetOut(Location::RequiresRegister());
519 constant->SetLocations(locations);
520}
521
522void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000523 codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100524}
525
526void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
527 // TODO: Support constant locations.
528 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
529 locations->SetOut(Location::RequiresRegister());
530 constant->SetLocations(locations);
531}
532
533void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100534 codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100535}
536
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);  // No operands for a void return.
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
545
// A return value is constrained to RAX (the x86-64 return register).
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, X86_64CpuLocation(RAX));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  // In debug builds, verify that the value really was allocated to RAX.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
585
// Argument registers used when calling quick runtime entrypoints.
// NOTE(review): presumably mirrors the quick runtime ABI rather than the full
// System V order — confirm against the entrypoint definitions.
static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
599
// Returns the location of the next managed-ABI argument of the given type,
// advancing the visitor's register and stack cursors. A long consumes one
// register (when available) but always two stack-index units, so stack
// offsets stay consistent with the dex-level two-slot representation.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Spilled argument: stack_index_ was already advanced, hence the -1.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        // NOTE(review): gp_index_ is bumped by 2 on the spill path but by 1 on
        // the register path — looks asymmetric; confirm against the x86 backend.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
640
void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  // RDI is used as a temp to hold the resolved ArtMethod (see the code generator).
  locations->AddTemp(X86_64CpuLocation(RDI));

  // Place each argument according to the managed calling convention.
  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); ++i) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  // The return value, when any, comes back in RAX.
  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetOut(X86_64CpuLocation(RAX));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
673
// Emits a static call: resolves the callee through the current method's
// dex-cache and calls its quick-compiled entry point.
void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  // (32-bit loads: these are heap references, not raw 64-bit pointers.)
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  // Record a stack map at the call site for GC / deoptimization.
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
698
void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // x86 add is two-operand: the destination must alias the first input.
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  // Verify the SameAsFirstInput constraint was honored.
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }
    case Primitive::kPrimLong: {
      __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
750
void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // x86 sub is two-operand: the destination must alias the first input.
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  // Verify the SameAsFirstInput constraint was honored.
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }
    case Primitive::kPrimLong: {
      __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}
802
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  // The runtime allocation entrypoint returns the new object in RAX.
  locations->SetOut(X86_64CpuLocation(RAX));
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Arguments: arg0 = type index, arg1 = referring method.
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(instruction->GetTypeIndex()));

  // Call the allocation entrypoint through the thread (gs) register.
  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  // Record a stack map: the runtime call can GC and throw.
  codegen_->RecordPcInfo(instruction->GetDexPc());
}
819
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  // Stack-passed parameters live in the caller's frame: rebase the calling
  // convention's offset by this method's frame size.
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}
835
836void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
837 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000838 locations->SetInAt(0, Location::RequiresRegister());
839 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100840 instruction->SetLocations(locations);
841}
842
void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  // The register allocator must have honored the SameAsFirstInput
  // constraint set by the locations builder.
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  // xor with 1 flips the low bit only — this presumably assumes the input
  // is a boolean (0 or 1); TODO confirm HNot is boolean-only here.
  __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
}
849
850void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
851 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
852 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
853 locations->SetInAt(i, Location::Any());
854 }
855 locations->SetOut(Location::Any());
856 instruction->SetLocations(locations);
857}
858
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
  // No code is generated for phis here — presumably they are resolved into
  // moves before code generation; verify against the register allocator.
  LOG(FATAL) << "Unimplemented";
}
862
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100863void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
864 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
865 locations->SetInAt(0, Location::RequiresRegister());
866 locations->SetInAt(1, Location::RequiresRegister());
867 instruction->SetLocations(locations);
868}
869
870void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
871 LocationSummary* locations = instruction->GetLocations();
872 CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
873 CpuRegister value = locations->InAt(1).AsX86_64().AsCpuRegister();
874 size_t offset = instruction->GetFieldOffset().SizeValue();
875 Primitive::Type field_type = instruction->InputAt(1)->GetType();
876
877 switch (field_type) {
878 case Primitive::kPrimBoolean:
879 case Primitive::kPrimByte: {
880 __ movb(Address(obj, offset), value);
881 break;
882 }
883
884 case Primitive::kPrimShort:
885 case Primitive::kPrimChar: {
886 __ movw(Address(obj, offset), value);
887 break;
888 }
889
890 case Primitive::kPrimInt:
891 case Primitive::kPrimNot: {
892 __ movl(Address(obj, offset), value);
893 break;
894 }
895
896 case Primitive::kPrimLong: {
897 __ movq(Address(obj, offset), value);
898 break;
899 }
900
901 case Primitive::kPrimFloat:
902 case Primitive::kPrimDouble:
903 LOG(FATAL) << "Unimplemented register type " << field_type;
904
905 case Primitive::kPrimVoid:
906 LOG(FATAL) << "Unreachable type " << field_type;
907 }
908}
909
910void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
911 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
912 locations->SetInAt(0, Location::RequiresRegister());
913 locations->SetOut(Location::RequiresRegister());
914 instruction->SetLocations(locations);
915}
916
917void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
918 LocationSummary* locations = instruction->GetLocations();
919 CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
920 CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
921 size_t offset = instruction->GetFieldOffset().SizeValue();
922
923 switch (instruction->GetType()) {
924 case Primitive::kPrimBoolean: {
925 __ movzxb(out, Address(obj, offset));
926 break;
927 }
928
929 case Primitive::kPrimByte: {
930 __ movsxb(out, Address(obj, offset));
931 break;
932 }
933
934 case Primitive::kPrimShort: {
935 __ movsxw(out, Address(obj, offset));
936 break;
937 }
938
939 case Primitive::kPrimChar: {
940 __ movzxw(out, Address(obj, offset));
941 break;
942 }
943
944 case Primitive::kPrimInt:
945 case Primitive::kPrimNot: {
946 __ movl(out, Address(obj, offset));
947 break;
948 }
949
950 case Primitive::kPrimLong: {
951 __ movq(out, Address(obj, offset));
952 break;
953 }
954
955 case Primitive::kPrimFloat:
956 case Primitive::kPrimDouble:
957 LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
958
959 case Primitive::kPrimVoid:
960 LOG(FATAL) << "Unreachable type " << instruction->GetType();
961 }
962}
963
964void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
965 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
966 locations->SetInAt(0, Location::Any());
967 // TODO: Have a normalization phase that makes this instruction never used.
968 locations->SetOut(Location::SameAsFirstInput());
969 instruction->SetLocations(locations);
970}
971
// Compares the object against null and branches to an out-of-line slow path
// (NullCheckSlowPathX86_64) that throws the exception.
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  // The locations builder forced the output to be SameAsFirstInput.
  DCHECK(obj.Equals(locations->Out()));

  // The reference is compared as a 32-bit value, whether it is in a
  // register or in a stack slot (Location::Any() allows both).
  if (obj.IsRegister()) {
    __ cmpl(obj.AsX86_64().AsCpuRegister(), Immediate(0));
  } else {
    DCHECK(locations->InAt(0).IsStackSlot());
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
989
void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
  // Temporaries get no location summary of their own.
  temp->SetLocations(nullptr);
}
993
// No code is generated for a temporary.
void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}
997
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
  // Presumably parallel moves only appear after the locations-building
  // phase, so this visitor should never run — verify against callers.
  LOG(FATAL) << "Unimplemented";
}
1001
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel move resolver, which emits the native code
  // (see ParallelMoveResolverX86_64::EmitMove/EmitSwap below).
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1005
// The resolver emits code through the code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1009
1010void ParallelMoveResolverX86_64::EmitMove(size_t index) {
1011 MoveOperands* move = moves_.Get(index);
1012 Location source = move->GetSource();
1013 Location destination = move->GetDestination();
1014
1015 if (source.IsRegister()) {
1016 if (destination.IsRegister()) {
1017 __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001018 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001019 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
1020 source.AsX86_64().AsCpuRegister());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001021 } else {
1022 DCHECK(destination.IsDoubleStackSlot());
1023 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
1024 source.AsX86_64().AsCpuRegister());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001025 }
1026 } else if (source.IsStackSlot()) {
1027 if (destination.IsRegister()) {
1028 __ movl(destination.AsX86_64().AsX86_64().AsCpuRegister(),
1029 Address(CpuRegister(RSP), source.GetStackIndex()));
1030 } else {
1031 DCHECK(destination.IsStackSlot());
1032 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1033 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
1034 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001035 } else if (source.IsDoubleStackSlot()) {
1036 if (destination.IsRegister()) {
1037 __ movq(destination.AsX86_64().AsX86_64().AsCpuRegister(),
1038 Address(CpuRegister(RSP), source.GetStackIndex()));
1039 } else {
1040 DCHECK(destination.IsDoubleStackSlot());
1041 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1042 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
1043 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001044 } else {
1045 LOG(FATAL) << "Unimplemented";
1046 }
1047}
1048
// Swaps the 32-bit contents of `reg` with the RSP-relative stack slot at
// offset `mem`, using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
1054
// Swaps two 32-bit stack slots. Besides TMP, a second scratch core register
// is needed; ScratchRegisterScope may spill one onto the stack (see
// SpillScratch), in which case RSP has moved down one word and the slot
// offsets must be rebased by kX86_64WordSize.
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
1067
// Swaps the 64-bit contents of `reg` with the RSP-relative stack slot at
// offset `mem`, using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
1073
// Swaps two 64-bit stack slots. Besides TMP, a second scratch core register
// is needed; ScratchRegisterScope may spill one onto the stack, in which
// case RSP has moved down one word and the slot offsets must be rebased by
// kX86_64WordSize.
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
1086
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001087void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
1088 MoveOperands* move = moves_.Get(index);
1089 Location source = move->GetSource();
1090 Location destination = move->GetDestination();
1091
1092 if (source.IsRegister() && destination.IsRegister()) {
1093 __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
1094 } else if (source.IsRegister() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001095 Exchange32(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001096 } else if (source.IsStackSlot() && destination.IsRegister()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001097 Exchange32(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001098 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001099 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
1100 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
1101 Exchange64(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
1102 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
1103 Exchange64(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
1104 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
1105 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001106 } else {
1107 LOG(FATAL) << "Unimplemented";
1108 }
1109}
1110
1111
// Frees up a scratch core register by saving it on the stack; note this
// moves RSP down by one word (see the stack_offset adjustment in the
// memory-to-memory Exchange helpers).
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
1115
1116
// Restores a scratch core register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
1120
1121} // namespace x86_64
1122} // namespace art