blob: 283f1f5e571a5d4777cdfe40e049094298a4eeee [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "mirror/array.h"
21#include "mirror/art_method.h"
22#include "mirror/object_reference.h"
23#include "thread.h"
24#include "utils/assembler.h"
25#include "utils/x86_64/assembler_x86_64.h"
26#include "utils/x86_64/managed_register_x86_64.h"
27
28#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
29
30namespace art {
31
32x86_64::X86_64ManagedRegister Location::AsX86_64() const {
33 return reg().AsX86_64();
34}
35
36namespace x86_64 {
37
Dave Allison20dfc792014-06-16 20:44:29 -070038inline Condition X86_64Condition(IfCondition cond) {
39 switch (cond) {
40 case kCondEQ: return kEqual;
41 case kCondNE: return kNotEqual;
42 case kCondLT: return kLess;
43 case kCondLE: return kLessEqual;
44 case kCondGT: return kGreater;
45 case kCondGE: return kGreaterEqual;
46 default:
47 LOG(FATAL) << "Unknown if condition";
48 }
49 return kEqual;
50}
51
// Some x86_64 instructions require a register to be available as temp.
// R11 is blocked from the register allocator (see SetupBlockedRegisters),
// so it is always free to clobber between HIR instructions.
static constexpr Register TMP = R11;

// Only the return PC counts as a "pushed register" at entry (the call
// instruction pushed it); no callee-saves are pushed yet.
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// The current ART method is spilled at the very bottom of the frame.
static constexpr int kCurrentMethodStackOffset = 0;
57
// Print a readable name for a core (CPU) register in debug dumps.
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}

// Print a readable name for a floating-point (XMM) register in debug dumps.
void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}

// Convenience wrapper: build a register Location from a physical CPU register.
static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}
69
// The code generator owns the location builder, the instruction visitor and
// the parallel-move resolver; all of them keep a back pointer to it.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
      : CodeGenerator(graph, kNumberOfRegIds),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this) {}

// The instruction visitor emits code through the code generator's assembler.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph, CodeGeneratorX86_64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
80
81ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
82 bool* blocked_registers) const {
83 switch (type) {
84 case Primitive::kPrimLong:
85 case Primitive::kPrimByte:
86 case Primitive::kPrimBoolean:
87 case Primitive::kPrimChar:
88 case Primitive::kPrimShort:
89 case Primitive::kPrimInt:
90 case Primitive::kPrimNot: {
91 size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
92 return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
93 }
94
95 case Primitive::kPrimFloat:
96 case Primitive::kPrimDouble:
97 LOG(FATAL) << "Unimplemented register type " << type;
98
99 case Primitive::kPrimVoid:
100 LOG(FATAL) << "Unreachable type " << type;
101 }
102
103 return ManagedRegister::NoRegister();
104}
105
106void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
107 // Stack register is always reserved.
108 blocked_registers[RSP] = true;
109
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000110 // Block the register used as TMP.
111 blocked_registers[TMP] = true;
112
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100113 // TODO: We currently don't use Quick's callee saved registers.
114 blocked_registers[RBX] = true;
115 blocked_registers[RBP] = true;
116 blocked_registers[R12] = true;
117 blocked_registers[R13] = true;
118 blocked_registers[R14] = true;
119 blocked_registers[R15] = true;
120}
121
// Compute the managed frame size: spill slots + filler + the current ART
// method slot + the already-pushed return PC, rounded up to the stack
// alignment.
void CodeGeneratorX86_64::ComputeFrameSize(size_t number_of_spill_slots) {
  // Add the current ART method to the frame size, the return PC, and the filler.
  SetFrameSize(RoundUp(
      number_of_spill_slots * kVRegSize
      + kVRegSize  // filler
      + kVRegSize  // Art method
      + kNumberOfPushedRegistersAtEntry * kX86_64WordSize,
      kStackAlignment));
}
131
// Emit the method prologue: allocate the frame and spill the current method.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
  // Spill the current method (passed in RDI) to its frame slot.
  // NOTE(review): movl stores only 32 bits of RDI — presumably managed heap
  // pointers fit in 32 bits in this configuration; confirm before relying on it.
  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}

// Emit the method epilogue: pop the frame (the ret consumes the return PC).
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}
146
// Bind `label` to the current position in the assembler buffer.
void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}

// Load the current ART method from its frame slot into `reg`.
// NOTE(review): movl loads 32 bits — assumes method pointers fit in 32 bits
// (same assumption as the prologue spill); TODO confirm.
void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}
154
155int32_t CodeGeneratorX86_64::GetStackSlot(HLocal* local) const {
156 uint16_t reg_number = local->GetRegNumber();
157 uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
158 uint16_t number_of_in_vregs = GetGraph()->GetNumberOfInVRegs();
159 if (reg_number >= number_of_vregs - number_of_in_vregs) {
160 // Local is a parameter of the method. It is stored in the caller's frame.
161 return GetFrameSize() + kVRegSize // ART method
162 + (reg_number - number_of_vregs + number_of_in_vregs) * kVRegSize;
163 } else {
164 // Local is a temporary in this method. It is stored in this method's frame.
165 return GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86_64WordSize)
166 - kVRegSize
167 - (number_of_vregs * kVRegSize)
168 + (reg_number * kVRegSize);
169 }
170}
171
172Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
173 switch (load->GetType()) {
174 case Primitive::kPrimLong:
175 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
176 break;
177
178 case Primitive::kPrimInt:
179 case Primitive::kPrimNot:
180 return Location::StackSlot(GetStackSlot(load->GetLocal()));
181
182 case Primitive::kPrimFloat:
183 case Primitive::kPrimDouble:
184 LOG(FATAL) << "Unimplemented type " << load->GetType();
185
186 case Primitive::kPrimBoolean:
187 case Primitive::kPrimByte:
188 case Primitive::kPrimChar:
189 case Primitive::kPrimShort:
190 case Primitive::kPrimVoid:
191 LOG(FATAL) << "Unexpected type " << load->GetType();
192 }
193
194 LOG(FATAL) << "Unreachable";
195 return Location();
196}
197
198void CodeGeneratorX86_64::Move(Location destination, Location source) {
199 if (source.Equals(destination)) {
200 return;
201 }
202 if (destination.IsRegister()) {
203 if (source.IsRegister()) {
204 __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
205 } else if (source.IsStackSlot()) {
206 __ movl(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));
207 } else {
208 DCHECK(source.IsDoubleStackSlot());
209 __ movq(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));
210 }
211 } else if (destination.IsStackSlot()) {
212 if (source.IsRegister()) {
213 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());
214 } else {
215 DCHECK(source.IsStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000216 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
217 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100218 }
219 } else {
220 DCHECK(destination.IsDoubleStackSlot());
221 if (source.IsRegister()) {
222 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());
223 } else {
224 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000225 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
226 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100227 }
228 }
229}
230
231void CodeGeneratorX86_64::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
232 if (instruction->AsIntConstant() != nullptr) {
233 Immediate imm(instruction->AsIntConstant()->GetValue());
234 if (location.IsRegister()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000235 __ movl(location.AsX86_64().AsCpuRegister(), imm);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100236 } else {
237 __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
238 }
239 } else if (instruction->AsLongConstant() != nullptr) {
240 int64_t value = instruction->AsLongConstant()->GetValue();
241 if (location.IsRegister()) {
242 __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
243 } else {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000244 __ movq(CpuRegister(TMP), Immediate(value));
245 __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100246 }
247 } else if (instruction->AsLoadLocal() != nullptr) {
248 switch (instruction->GetType()) {
249 case Primitive::kPrimBoolean:
250 case Primitive::kPrimByte:
251 case Primitive::kPrimChar:
252 case Primitive::kPrimShort:
253 case Primitive::kPrimInt:
254 case Primitive::kPrimNot:
255 Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
256 break;
257
258 case Primitive::kPrimLong:
259 Move(location, Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
260 break;
261
262 default:
263 LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
264 }
265 } else {
266 // This can currently only happen when the instruction that requests the move
267 // is the next to be compiled.
268 DCHECK_EQ(instruction->GetNext(), move_for);
269 switch (instruction->GetType()) {
270 case Primitive::kPrimBoolean:
271 case Primitive::kPrimByte:
272 case Primitive::kPrimChar:
273 case Primitive::kPrimShort:
274 case Primitive::kPrimInt:
275 case Primitive::kPrimNot:
276 case Primitive::kPrimLong:
277 Move(location, instruction->GetLocations()->Out());
278 break;
279
280 default:
281 LOG(FATAL) << "Unimplemented type " << instruction->GetType();
282 }
283 }
284}
285
286void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
287 got->SetLocations(nullptr);
288}
289
290void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
291 HBasicBlock* successor = got->GetSuccessor();
292 if (GetGraph()->GetExitBlock() == successor) {
293 codegen_->GenerateFrameExit();
294 } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
295 __ jmp(codegen_->GetLabelOf(successor));
296 }
297}
298
// The exit block produces no code and needs no operands.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// The exit block should never be executed directly; trap in debug builds so
// a stray fall-through is caught immediately.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
309
310void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
311 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
Dave Allison20dfc792014-06-16 20:44:29 -0700312 locations->SetInAt(0, Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100313 if_instr->SetLocations(locations);
314}
315
316void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
Dave Allison20dfc792014-06-16 20:44:29 -0700317 HInstruction* cond = if_instr->InputAt(0);
318 DCHECK(cond->IsCondition());
319 HCondition* condition = cond->AsCondition();
320 if (condition->NeedsMaterialization()) {
321 // Materialized condition, compare against 0.
322 Location lhs = if_instr->GetLocations()->InAt(0);
323 if (lhs.IsRegister()) {
324 __ cmpl(lhs.AsX86_64().AsCpuRegister(), Immediate(0));
325 } else {
326 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
327 }
328 __ j(kEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
329 } else {
330 Location lhs = condition->GetLocations()->InAt(0);
331 Location rhs = condition->GetLocations()->InAt(1);
332 __ cmpl(lhs.AsX86_64().AsCpuRegister(), rhs.AsX86_64().AsCpuRegister());
333 __ j(X86_64Condition(condition->GetCondition()),
334 codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
335 }
336 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
337 __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100338 }
339}
340
// HLocal carries no code of its own; it only names a stack slot.
void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

// Loads are resolved by the code generator when the value is consumed.
void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
356
357void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
358 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
359 switch (store->InputAt(1)->GetType()) {
360 case Primitive::kPrimBoolean:
361 case Primitive::kPrimByte:
362 case Primitive::kPrimChar:
363 case Primitive::kPrimShort:
364 case Primitive::kPrimInt:
365 case Primitive::kPrimNot:
366 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
367 break;
368
369 case Primitive::kPrimLong:
370 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
371 break;
372
373 default:
374 LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
375 }
376 store->SetLocations(locations);
377}
378
379void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
380}
381
// Both comparison operands must be in registers; the boolean output (when
// materialized) reuses the first input's register.
void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  comp->SetLocations(locations);
}

// Emit code only when the boolean result is actually needed; otherwise the
// comparison is folded into the consuming HIf (see VisitIf).
void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    // NOTE(review): cmpq compares the full 64-bit registers — this presumes
    // 32-bit inputs are consistently extended in their registers; confirm.
    __ cmpq(comp->GetLocations()->InAt(0).AsX86_64().AsCpuRegister(),
            comp->GetLocations()->InAt(1).AsX86_64().AsCpuRegister());
    // Set the output register to 0/1 from the comparison's condition code.
    __ setcc(X86_64Condition(comp->GetCondition()),
             comp->GetLocations()->Out().AsX86_64().AsCpuRegister());
  }
}
398
// All concrete comparison nodes share HCondition's behavior: both the
// location building and the code generation simply delegate to
// VisitCondition above.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
446
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}

// Materialize the 32-bit constant into its output location.
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
  codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
468
// A void return has no operand.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

// Tear down the frame and return to the caller.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
477
478void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
479 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
480 switch (ret->InputAt(0)->GetType()) {
481 case Primitive::kPrimBoolean:
482 case Primitive::kPrimByte:
483 case Primitive::kPrimChar:
484 case Primitive::kPrimShort:
485 case Primitive::kPrimInt:
486 case Primitive::kPrimNot:
487 case Primitive::kPrimLong:
488 locations->SetInAt(0, X86_64CpuLocation(RAX));
489 break;
490
491 default:
492 LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
493 }
494 ret->SetLocations(locations);
495}
496
497void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
498 if (kIsDebugBuild) {
499 switch (ret->InputAt(0)->GetType()) {
500 case Primitive::kPrimBoolean:
501 case Primitive::kPrimByte:
502 case Primitive::kPrimChar:
503 case Primitive::kPrimShort:
504 case Primitive::kPrimInt:
505 case Primitive::kPrimNot:
506 case Primitive::kPrimLong:
507 DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
508 break;
509
510 default:
511 LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
512 }
513 }
514 codegen_->GenerateFrameExit();
515 __ ret();
516}
517
// Core registers used to pass arguments to runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

// Calling convention for calls into the ART runtime (quick entry points).
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
531
532Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
533 switch (type) {
534 case Primitive::kPrimBoolean:
535 case Primitive::kPrimByte:
536 case Primitive::kPrimChar:
537 case Primitive::kPrimShort:
538 case Primitive::kPrimInt:
539 case Primitive::kPrimNot: {
540 uint32_t index = gp_index_++;
541 stack_index_++;
542 if (index < calling_convention.GetNumberOfRegisters()) {
543 return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
544 } else {
545 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
546 }
547 }
548
549 case Primitive::kPrimLong: {
550 uint32_t index = gp_index_;
551 stack_index_ += 2;
552 if (index < calling_convention.GetNumberOfRegisters()) {
553 gp_index_ += 1;
554 return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
555 } else {
556 gp_index_ += 2;
557 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
558 }
559 }
560
561 case Primitive::kPrimDouble:
562 case Primitive::kPrimFloat:
563 LOG(FATAL) << "Unimplemented parameter type " << type;
564 break;
565
566 case Primitive::kPrimVoid:
567 LOG(FATAL) << "Unexpected parameter type " << type;
568 break;
569 }
570 return Location();
571}
572
573void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
574 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
575 locations->AddTemp(X86_64CpuLocation(RDI));
576
577 InvokeDexCallingConventionVisitor calling_convention_visitor;
578 for (size_t i = 0; i < invoke->InputCount(); ++i) {
579 HInstruction* input = invoke->InputAt(i);
580 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
581 }
582
583 switch (invoke->GetType()) {
584 case Primitive::kPrimBoolean:
585 case Primitive::kPrimByte:
586 case Primitive::kPrimChar:
587 case Primitive::kPrimShort:
588 case Primitive::kPrimInt:
589 case Primitive::kPrimNot:
590 case Primitive::kPrimLong:
591 locations->SetOut(X86_64CpuLocation(RAX));
592 break;
593
594 case Primitive::kPrimVoid:
595 break;
596
597 case Primitive::kPrimDouble:
598 case Primitive::kPrimFloat:
599 LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
600 break;
601 }
602
603 invoke->SetLocations(locations);
604}
605
// Emit the call sequence: resolve the callee through the current method's
// dex cache, then call its quick-compiled entry point.
void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  // Byte offset of the callee inside the dex cache's resolved-methods array.
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  // NOTE(review): movl loads 32-bit heap references — consistent with
  // LoadCurrentMethod's assumption that managed pointers fit in 32 bits.
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  // Record the call site so the runtime can map the PC back to dex.
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
630
631void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
632 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
633 switch (add->GetResultType()) {
634 case Primitive::kPrimInt:
635 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000636 locations->SetInAt(0, Location::RequiresRegister());
637 locations->SetInAt(1, Location::RequiresRegister());
638 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100639 break;
640 }
641
642 case Primitive::kPrimBoolean:
643 case Primitive::kPrimByte:
644 case Primitive::kPrimChar:
645 case Primitive::kPrimShort:
646 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
647 break;
648
649 default:
650 LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
651 }
652 add->SetLocations(locations);
653}
654
655void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
656 LocationSummary* locations = add->GetLocations();
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000657 DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
658 locations->Out().AsX86_64().AsCpuRegister().AsRegister());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100659 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000660 case Primitive::kPrimInt: {
661 __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
662 locations->InAt(1).AsX86_64().AsCpuRegister());
663 break;
664 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100665 case Primitive::kPrimLong: {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100666 __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
667 locations->InAt(1).AsX86_64().AsCpuRegister());
668 break;
669 }
670
671 case Primitive::kPrimBoolean:
672 case Primitive::kPrimByte:
673 case Primitive::kPrimChar:
674 case Primitive::kPrimShort:
675 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
676 break;
677
678 default:
679 LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
680 }
681}
682
683void LocationsBuilderX86_64::VisitSub(HSub* sub) {
684 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
685 switch (sub->GetResultType()) {
686 case Primitive::kPrimInt:
687 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000688 locations->SetInAt(0, Location::RequiresRegister());
689 locations->SetInAt(1, Location::RequiresRegister());
690 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100691 break;
692 }
693
694 case Primitive::kPrimBoolean:
695 case Primitive::kPrimByte:
696 case Primitive::kPrimChar:
697 case Primitive::kPrimShort:
698 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
699 break;
700
701 default:
702 LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
703 }
704 sub->SetLocations(locations);
705}
706
707void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
708 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000709 DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
710 locations->Out().AsX86_64().AsCpuRegister().AsRegister());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100711 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000712 case Primitive::kPrimInt: {
713 __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
714 locations->InAt(1).AsX86_64().AsCpuRegister());
715 break;
716 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100717 case Primitive::kPrimLong: {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100718 __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
719 locations->InAt(1).AsX86_64().AsCpuRegister());
720 break;
721 }
722
723 case Primitive::kPrimBoolean:
724 case Primitive::kPrimByte:
725 case Primitive::kPrimChar:
726 case Primitive::kPrimShort:
727 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
728 break;
729
730 default:
731 LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
732 }
733}
734
// Object allocation is a runtime call; the new object comes back in RAX.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetOut(X86_64CpuLocation(RAX));
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Runtime arguments: arg0 = type index, arg1 = referring ART method.
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(instruction->GetTypeIndex()));

  // Call pAllocObjectWithAccessCheck through the thread-local (gs-relative)
  // quick entry point table.
  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  // Record the call site so the runtime can map this PC back to dex.
  codegen_->RecordPcInfo(instruction->GetDexPc());
}
751
752void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
753 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
754 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
755 if (location.IsStackSlot()) {
756 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
757 } else if (location.IsDoubleStackSlot()) {
758 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
759 }
760 locations->SetOut(location);
761 instruction->SetLocations(locations);
762}
763
764void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
765 // Nothing to do, the parameter is already at its location.
766}
767
// Boolean not: input in a register, output reuses the same register.
void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

// Flip the boolean (0/1) by xor-ing with 1 in place.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
}
781
// Phis accept their operands anywhere; register allocation decides later.
void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
  instruction->SetLocations(locations);
}

// Phis never generate code themselves; they are resolved into moves.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}

// Parallel moves are only created by the register allocator, which never
// runs the locations builder on them.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unimplemented";
}

// Delegate to the parallel-move resolver, which orders the moves safely.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
802
// The resolver emits through the owning code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
806
807void ParallelMoveResolverX86_64::EmitMove(size_t index) {
808 MoveOperands* move = moves_.Get(index);
809 Location source = move->GetSource();
810 Location destination = move->GetDestination();
811
812 if (source.IsRegister()) {
813 if (destination.IsRegister()) {
814 __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
815 } else {
816 DCHECK(destination.IsStackSlot());
817 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
818 source.AsX86_64().AsCpuRegister());
819 }
820 } else if (source.IsStackSlot()) {
821 if (destination.IsRegister()) {
822 __ movl(destination.AsX86_64().AsX86_64().AsCpuRegister(),
823 Address(CpuRegister(RSP), source.GetStackIndex()));
824 } else {
825 DCHECK(destination.IsStackSlot());
826 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
827 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
828 }
829 } else {
830 LOG(FATAL) << "Unimplemented";
831 }
832}
833
// Swap a register with a 32-bit stack slot, using TMP as the intermediary.
void ParallelMoveResolverX86_64::Exchange(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), CpuRegister(reg));
  __ movl(CpuRegister(reg), CpuRegister(TMP));
}

// Swap two 32-bit stack slots. A second scratch register (besides TMP) is
// needed; the scope may push/pop one (RAX preferred), which shifts all
// RSP-relative offsets by one word — hence `stack_offset`.
void ParallelMoveResolverX86_64::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
852
853void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
854 MoveOperands* move = moves_.Get(index);
855 Location source = move->GetSource();
856 Location destination = move->GetDestination();
857
858 if (source.IsRegister() && destination.IsRegister()) {
859 __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
860 } else if (source.IsRegister() && destination.IsStackSlot()) {
861 Exchange(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
862 } else if (source.IsStackSlot() && destination.IsRegister()) {
863 Exchange(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
864 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
865 Exchange(destination.GetStackIndex(), source.GetStackIndex());
866 } else {
867 LOG(FATAL) << "Unimplemented";
868 }
869}
870
871
// Free up a scratch register by pushing its value on the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}


// Restore a previously spilled scratch register.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
880
881} // namespace x86_64
882} // namespace art