blob: dc1d6164b1a3fe2e68f2226edca79914bbb8987c [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "mirror/array.h"
21#include "mirror/art_method.h"
22#include "mirror/object_reference.h"
23#include "thread.h"
24#include "utils/assembler.h"
25#include "utils/x86_64/assembler_x86_64.h"
26#include "utils/x86_64/managed_register_x86_64.h"
27
28#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
29
30namespace art {
31
// Convenience accessor: views this Location's register as an x86-64
// managed register. Only valid when the location holds a register.
x86_64::X86_64ManagedRegister Location::AsX86_64() const {
  return reg().AsX86_64();
}
35
36namespace x86_64 {
37
Dave Allison20dfc792014-06-16 20:44:29 -070038inline Condition X86_64Condition(IfCondition cond) {
39 switch (cond) {
40 case kCondEQ: return kEqual;
41 case kCondNE: return kNotEqual;
42 case kCondLT: return kLess;
43 case kCondLE: return kLessEqual;
44 case kCondGT: return kGreater;
45 case kCondGE: return kGreaterEqual;
46 default:
47 LOG(FATAL) << "Unknown if condition";
48 }
49 return kEqual;
50}
51
// Some x86_64 instructions require a register to be available as temp.
// R11 is caller-saved and never allocated (see SetupBlockedRegisters).
static constexpr Register TMP = R11;

// Number of registers pushed in the prologue; currently only the fake
// return-address register used to mimic Quick (see GenerateFrameEntry).
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// Stack offset (from RSP) at which the current ART method is stored.
static constexpr int kCurrentMethodStackOffset = 0;
57
// Pretty-prints a core register for debugging output.
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}

// Pretty-prints a floating-point (XMM) register for debugging output.
void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}

// Wraps a CPU register into a register Location.
static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}

CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
      : CodeGenerator(graph, kNumberOfRegIds),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this) {}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph, CodeGeneratorX86_64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
80
// Picks a free CPU register for the given type. All non-FP types
// (including longs and references) get a general-purpose register;
// float/double allocation is not implemented yet.
ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
                                                          bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // LOG(FATAL) aborts, so the fall-through into the next case is dead.
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}

// Marks the registers the allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
  // Stack register is always reserved.
  blocked_registers[RSP] = true;

  // Block the register used as TMP.
  blocked_registers[TMP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[RBX] = true;
  blocked_registers[RBP] = true;
  blocked_registers[R12] = true;
  blocked_registers[R13] = true;
  blocked_registers[R14] = true;
  blocked_registers[R15] = true;
}
121
// Computes the total frame size: spill slots + filler + ART method slot +
// the pushed return PC, rounded up to the stack alignment.
void CodeGeneratorX86_64::ComputeFrameSize(size_t number_of_spill_slots) {
  // Add the current ART method to the frame size, the return PC, and the filler.
  SetFrameSize(RoundUp(
      number_of_spill_slots * kVRegSize
      + kVRegSize  // filler
      + kVRegSize  // Art method
      + kNumberOfPushedRegistersAtEntry * kX86_64WordSize,
      kStackAlignment));
}

// Emits the method prologue: reserve the frame and spill the current
// method (passed in RDI) to the bottom of the frame.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subq(CpuRegister(RSP), Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
  // NOTE(review): movl stores only the low 32 bits of RDI — presumably
  // method pointers fit in 32 bits here (matches the movl loads elsewhere);
  // confirm against the runtime's pointer model.
  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}

// Emits the method epilogue: pop the frame (the return PC is handled by ret).
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}

// Binds a label to the current assembler position.
void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}

// Reloads the current ART method from its frame slot into `reg`.
// Uses a 32-bit load, matching the 32-bit store in GenerateFrameEntry.
void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}
154
// Returns the frame offset of a dex local. Incoming parameters live in the
// caller's frame (above our frame and the caller-pushed ART method slot);
// all other locals live at the bottom of this method's frame, below the
// pushed registers and the filler word.
int32_t CodeGeneratorX86_64::GetStackSlot(HLocal* local) const {
  uint16_t reg_number = local->GetRegNumber();
  uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
  uint16_t number_of_in_vregs = GetGraph()->GetNumberOfInVRegs();
  if (reg_number >= number_of_vregs - number_of_in_vregs) {
    // Local is a parameter of the method. It is stored in the caller's frame.
    return GetFrameSize() + kVRegSize  // ART method
              + (reg_number - number_of_vregs + number_of_in_vregs) * kVRegSize;
  } else {
    // Local is a temporary in this method. It is stored in this method's frame.
    return GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86_64WordSize)
                          - kVRegSize  // filler
                          - (number_of_vregs * kVRegSize)
                          + (reg_number * kVRegSize);
  }
}
171
172Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
173 switch (load->GetType()) {
174 case Primitive::kPrimLong:
175 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
176 break;
177
178 case Primitive::kPrimInt:
179 case Primitive::kPrimNot:
180 return Location::StackSlot(GetStackSlot(load->GetLocal()));
181
182 case Primitive::kPrimFloat:
183 case Primitive::kPrimDouble:
184 LOG(FATAL) << "Unimplemented type " << load->GetType();
185
186 case Primitive::kPrimBoolean:
187 case Primitive::kPrimByte:
188 case Primitive::kPrimChar:
189 case Primitive::kPrimShort:
190 case Primitive::kPrimVoid:
191 LOG(FATAL) << "Unexpected type " << load->GetType();
192 }
193
194 LOG(FATAL) << "Unreachable";
195 return Location();
196}
197
// Emits a move between two locations. Single stack slots use 32-bit moves,
// double slots and register-to-register transfers use 64-bit moves; a
// memory-to-memory move goes through the reserved TMP register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (source.IsStackSlot()) {
      __ movl(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      // Memory-to-memory: stage the value in TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Memory-to-memory: stage the 64-bit value in TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}

// Materializes the value produced by `instruction` into `location`.
// Constants are emitted inline, loads of locals are forwarded to their
// stack slot, and any other instruction must be the one immediately
// preceding `move_for` so its output location is still valid.
void CodeGeneratorX86_64::Move(HInstruction* instruction,
                               Location location,
                               HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86_64().AsCpuRegister(), imm);
    } else {
      __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
    } else {
      // No 64-bit immediate-to-memory form; stage through TMP.
      __ movq(CpuRegister(TMP), Immediate(value));
      __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move(location, Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // This can currently only happen when the instruction that requests the move
    // is the next to be compiled.
    DCHECK_EQ(instruction->GetNext(), move_for);
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        Move(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
287
// HGoto needs no operand locations.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Emits an unconditional branch, eliding the jump when the successor is
// the next block, and emitting the epilogue when jumping to the exit.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

// HExit needs no operand locations.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// The exit block must never be reached; trap in debug builds.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
311
// The (possibly materialized) condition may be in a register or on the
// stack, so accept any location for the single input.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  locations->SetInAt(0, Location::Any());
  if_instr->SetLocations(locations);
}
317
318void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
Dave Allison20dfc792014-06-16 20:44:29 -0700319 HInstruction* cond = if_instr->InputAt(0);
320 DCHECK(cond->IsCondition());
321 HCondition* condition = cond->AsCondition();
322 if (condition->NeedsMaterialization()) {
323 // Materialized condition, compare against 0.
324 Location lhs = if_instr->GetLocations()->InAt(0);
325 if (lhs.IsRegister()) {
326 __ cmpl(lhs.AsX86_64().AsCpuRegister(), Immediate(0));
327 } else {
328 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
329 }
330 __ j(kEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
331 } else {
332 Location lhs = condition->GetLocations()->InAt(0);
333 Location rhs = condition->GetLocations()->InAt(1);
334 __ cmpl(lhs.AsX86_64().AsCpuRegister(), rhs.AsX86_64().AsCpuRegister());
335 __ j(X86_64Condition(condition->GetCondition()),
336 codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
337 }
338 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
339 __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100340 }
341}
342
// HLocal produces no code and needs no locations.
void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

// HLoadLocal is resolved by the code generator (see GetStackLocation /
// CodeGeneratorX86_64::Move), not emitted here.
void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

// A store to a local pins its value input (input 1) directly to the
// local's stack slot, so no code is needed at codegen time.
void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

// Nothing to emit: the input was forced into the local's slot above.
void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
}
383
// Both operands in registers; the output register is reserved
// unconditionally even though it is only written when the condition is
// materialized (see the codegen visitor).
void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  comp->SetLocations(locations);
}
391
Dave Allison20dfc792014-06-16 20:44:29 -0700392void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* comp) {
393 if (comp->NeedsMaterialization()) {
394 __ cmpq(comp->GetLocations()->InAt(0).AsX86_64().AsCpuRegister(),
395 comp->GetLocations()->InAt(1).AsX86_64().AsCpuRegister());
396 __ setcc(X86_64Condition(comp->GetCondition()),
397 comp->GetLocations()->Out().AsX86_64().AsCpuRegister());
398 }
399}
400
// All concrete comparison nodes share the HCondition handling above; each
// visitor pair simply delegates to VisitCondition.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
448
// HCompare: both operands and the result in registers.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  compare->SetLocations(locations);
}

// Emits the three-way compare: out = -1 / 0 / +1 for lhs < / == / > rhs.
// Only long inputs are implemented so far.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  Label greater, done;
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }

  // mov does not modify flags, so the flags from the cmpq above are still
  // live for both conditional jumps.
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(0));
  __ j(kEqual, &done);
  __ j(kGreater, &greater);

  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(-1));
  __ jmp(&done);

  __ Bind(&greater);
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));

  __ Bind(&done);
}
481
// Int constants are currently forced into a register.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}

// Delegates to CodeGeneratorX86_64::Move, which emits the immediate load.
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
  codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
}

// Long constants are currently forced into a register.
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
  codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
}

// Void return: no operands.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
512
// The return value must be in RAX, per the calling convention.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, X86_64CpuLocation(RAX));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}

// The value is already in RAX (checked in debug builds); just emit the
// epilogue and return.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
552
// Core registers used to pass arguments to runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

// Calling convention for calls into the runtime (quick entrypoints).
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

// Assigns the location of the next argument of the given type:
// a core register while any remain, otherwise a caller-frame stack slot.
// Longs consume one register but two stack-slot indices.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
607
// Arguments follow the dex calling convention; RDI is reserved as a temp
// for the callee method pointer, and the result (if any) lands in RAX.
void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86_64CpuLocation(RDI));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); ++i) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetOut(X86_64CpuLocation(RAX));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}

// Resolves the callee through the current method's dex cache and performs
// an indirect call through its quick entry point.
void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  // Record the safepoint so the runtime can map this PC back to dex.
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
665
// Add uses a two-address form: the output shares the first input's
// register, and the second input must also be in a register.
void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}

// Emits addl/addq into the first input register (== output register).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }
    case Primitive::kPrimLong: {
      __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
717
// Sub mirrors Add: two-address form, output shares the first input.
void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}

// Emits subl/subq into the first input register (== output register).
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }
    case Primitive::kPrimLong: {
      __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}
769
// The allocation entrypoint returns the new object in RAX.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetOut(X86_64CpuLocation(RAX));
  instruction->SetLocations(locations);
}

// Calls the pAllocObjectWithAccessCheck entrypoint with (type index,
// current method) in the runtime argument registers.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(instruction->GetTypeIndex()));

  // Entrypoints are reached through the thread register (gs segment).
  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  codegen_->RecordPcInfo(instruction->GetDexPc());
}

// A parameter's location comes from the dex calling convention; stack
// parameters are rebased onto the caller's frame by adding our frame size.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}
802
803void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
804 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000805 locations->SetInAt(0, Location::RequiresRegister());
806 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100807 instruction->SetLocations(locations);
808}
809
810void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
811 LocationSummary* locations = instruction->GetLocations();
812 DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
813 locations->Out().AsX86_64().AsCpuRegister().AsRegister());
814 __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
815}
816
817void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
818 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
819 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
820 locations->SetInAt(i, Location::Any());
821 }
822 locations->SetOut(Location::Any());
823 instruction->SetLocations(locations);
824}
825
826void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
827 LOG(FATAL) << "Unimplemented";
828}
829
830void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
831 LOG(FATAL) << "Unimplemented";
832}
833
// Delegate to the parallel move resolver, which orders the individual moves
// (breaking cycles with swaps) and emits the native code for each.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
837
// The resolver emits code through the code generator's assembler; it does
// not own one itself.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
841
842void ParallelMoveResolverX86_64::EmitMove(size_t index) {
843 MoveOperands* move = moves_.Get(index);
844 Location source = move->GetSource();
845 Location destination = move->GetDestination();
846
847 if (source.IsRegister()) {
848 if (destination.IsRegister()) {
849 __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100850 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000851 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
852 source.AsX86_64().AsCpuRegister());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100853 } else {
854 DCHECK(destination.IsDoubleStackSlot());
855 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
856 source.AsX86_64().AsCpuRegister());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000857 }
858 } else if (source.IsStackSlot()) {
859 if (destination.IsRegister()) {
860 __ movl(destination.AsX86_64().AsX86_64().AsCpuRegister(),
861 Address(CpuRegister(RSP), source.GetStackIndex()));
862 } else {
863 DCHECK(destination.IsStackSlot());
864 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
865 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
866 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100867 } else if (source.IsDoubleStackSlot()) {
868 if (destination.IsRegister()) {
869 __ movq(destination.AsX86_64().AsX86_64().AsCpuRegister(),
870 Address(CpuRegister(RSP), source.GetStackIndex()));
871 } else {
872 DCHECK(destination.IsDoubleStackSlot());
873 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
874 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
875 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000876 } else {
877 LOG(FATAL) << "Unimplemented";
878 }
879}
880
// Swap a 32-bit value between |reg| and the stack slot at RSP + |mem|,
// using the scratch register TMP to hold the old slot contents.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
886
// Swap two 32-bit stack slots at RSP + |mem1| and RSP + |mem2|. One value is
// staged in TMP; a second scratch register is obtained via
// ScratchRegisterScope for the other.
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // If the scratch register had to be spilled, SpillScratch pushed it onto
  // the stack, shifting both slot offsets by one word.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
899
// Swap a 64-bit value between |reg| and the double stack slot at
// RSP + |mem|, using the scratch register TMP to hold the old slot contents.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
905
// Swap two 64-bit (double) stack slots at RSP + |mem1| and RSP + |mem2|.
// Mirrors the 32-bit variant above, with quadword moves.
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Account for the push performed by SpillScratch if the extra scratch
  // register had to be saved: all slot offsets shift by one word.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
918
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000919void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
920 MoveOperands* move = moves_.Get(index);
921 Location source = move->GetSource();
922 Location destination = move->GetDestination();
923
924 if (source.IsRegister() && destination.IsRegister()) {
925 __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
926 } else if (source.IsRegister() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100927 Exchange32(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000928 } else if (source.IsStackSlot() && destination.IsRegister()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100929 Exchange32(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000930 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +0100931 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
932 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
933 Exchange64(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
934 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
935 Exchange64(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
936 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
937 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000938 } else {
939 LOG(FATAL) << "Unimplemented";
940 }
941}
942
943
// Save a core register the resolver wants as scratch by pushing it; note the
// push shifts all RSP-relative slot offsets by one word (see the
// stack_offset adjustment in the Exchange helpers above).
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
947
948
// Restore a scratch register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
952
953} // namespace x86_64
954} // namespace art