/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->

namespace art {
// Views this location's register payload as an x86-64 managed register.
x86_64::X86_64ManagedRegister Location::AsX86_64() const {
  return reg().AsX86_64();
}
35
36namespace x86_64 {
37
// Some x86_64 instructions require a register to be available as temp.
static constexpr Register TMP = R11;

// Number of registers pushed in the prologue (the fake return-address register).
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// Stack offset (from RSP after frame setup) where the current ART method is stored.
static constexpr int kCurrentMethodStackOffset = 0;
// Pretty-prints a core register index for debugging output.
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}
47
// Pretty-prints a floating-point (XMM) register index for debugging output.
void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}
51
// Helper: wraps a raw CPU register into a register Location.
static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}
55
// Wires the locations builder, instruction visitor and parallel-move resolver
// to this code generator.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
      : CodeGenerator(graph, kNumberOfRegIds),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this) {}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010061
// Caches the assembler so the `__` macro can emit through it.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph, CodeGeneratorX86_64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
66
// Picks a free register of the right kind for `type`, honoring the
// `blocked_registers` bitmap. Floating-point allocation is not implemented yet.
ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
                                                          bool* blocked_registers) const {
  switch (type) {
    // All integral and reference types share the CPU register file.
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  // Not reached; keeps the compiler happy about the return path.
  return ManagedRegister::NoRegister();
}
91
// Marks registers the register allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
  // Stack register is always reserved.
  blocked_registers[RSP] = true;

  // Block the register used as TMP.
  blocked_registers[TMP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[RBX] = true;
  blocked_registers[RBP] = true;
  blocked_registers[R12] = true;
  blocked_registers[R13] = true;
  blocked_registers[R14] = true;
  blocked_registers[R15] = true;
}
107
// Computes the total frame size: spill slots, a filler slot, the ART method
// slot and the pushed return address, rounded up to the stack alignment.
void CodeGeneratorX86_64::ComputeFrameSize(size_t number_of_spill_slots) {
  // Add the current ART method to the frame size, the return PC, and the filler.
  SetFrameSize(RoundUp(
      number_of_spill_slots * kVRegSize
      + kVRegSize  // filler
      + kVRegSize  // Art method
      + kNumberOfPushedRegistersAtEntry * kX86_64WordSize,
      kStackAlignment));
}
117
// Emits the method prologue: allocate the frame and store the current method.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subq(CpuRegister(RSP), Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
  // NOTE(review): 32-bit store of RDI (the ART method pointer) -- presumably
  // method pointers fit in 32 bits here; confirm against the runtime's layout.
  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}
127
// Emits the method epilogue: pop the frame (return PC stays for `ret`).
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}
132
// Binds `label` to the current assembler position.
void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}
136
// Loads the current ART method (stored at the bottom of the frame) into `reg`.
// 32-bit load, matching the 32-bit store in GenerateFrameEntry().
void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}
140
141int32_t CodeGeneratorX86_64::GetStackSlot(HLocal* local) const {
142 uint16_t reg_number = local->GetRegNumber();
143 uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
144 uint16_t number_of_in_vregs = GetGraph()->GetNumberOfInVRegs();
145 if (reg_number >= number_of_vregs - number_of_in_vregs) {
146 // Local is a parameter of the method. It is stored in the caller's frame.
147 return GetFrameSize() + kVRegSize // ART method
148 + (reg_number - number_of_vregs + number_of_in_vregs) * kVRegSize;
149 } else {
150 // Local is a temporary in this method. It is stored in this method's frame.
151 return GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86_64WordSize)
152 - kVRegSize
153 - (number_of_vregs * kVRegSize)
154 + (reg_number * kVRegSize);
155 }
156}
157
158Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
159 switch (load->GetType()) {
160 case Primitive::kPrimLong:
161 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
162 break;
163
164 case Primitive::kPrimInt:
165 case Primitive::kPrimNot:
166 return Location::StackSlot(GetStackSlot(load->GetLocal()));
167
168 case Primitive::kPrimFloat:
169 case Primitive::kPrimDouble:
170 LOG(FATAL) << "Unimplemented type " << load->GetType();
171
172 case Primitive::kPrimBoolean:
173 case Primitive::kPrimByte:
174 case Primitive::kPrimChar:
175 case Primitive::kPrimShort:
176 case Primitive::kPrimVoid:
177 LOG(FATAL) << "Unexpected type " << load->GetType();
178 }
179
180 LOG(FATAL) << "Unreachable";
181 return Location();
182}
183
// Emits a move between two arbitrary locations (register, 32-bit stack slot,
// 64-bit stack slot). Memory-to-memory moves go through the reserved TMP
// register since x86 has no mem-to-mem mov.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (source.IsStackSlot()) {
      // 32-bit load; movl zero-extends into the 64-bit register.
      __ movl(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
216
// Moves the value produced by `instruction` into `location`. Constants are
// materialized directly; loads of locals are forwarded from their stack slot;
// anything else must be the instruction just compiled, whose output location
// is used as the source.
void CodeGeneratorX86_64::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86_64().AsCpuRegister(), imm);
    } else {
      __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
    } else {
      // No 64-bit immediate-to-memory mov: stage through TMP.
      __ movq(CpuRegister(TMP), Immediate(value));
      __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move(location, Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // This can currently only happen when the instruction that requests the move
    // is the next to be compiled.
    DCHECK_EQ(instruction->GetNext(), move_for);
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        Move(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
271
// A goto needs no operands, hence no location summary.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
275
// Jumps to the successor block, unless it is the exit block (emit the frame
// exit instead) or it immediately follows (fall through, no jump needed).
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
284
// The exit block needs no operands.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
288
// The exit block should never be reached at runtime; in debug builds emit a
// breakpoint to catch control flow falling into it.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
295
// The condition input must be materialized in a register (see codegen TODO).
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  locations->SetInAt(0, Location::RequiresRegister());
  if_instr->SetLocations(locations);
}
301
// Branches on the materialized condition: zero means false. The jump to the
// true successor is elided when it is the fall-through block.
void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
  // TODO: Generate the input as a condition, instead of materializing in a register.
  __ cmpl(if_instr->GetLocations()->InAt(0).AsX86_64().AsCpuRegister(), Immediate(0));
  __ j(kEqual, codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
}
310
// Locals carry no operands; their storage is a fixed stack slot.
void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
314
// No code: locals only exist in the entry block as slot declarations.
void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
318
// Loads of locals are resolved at their use site (see CodeGeneratorX86_64::Move).
void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
322
void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
326
// Pins the stored value (input 1) directly to the local's stack slot, so the
// store itself needs no code.
void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}
348
// Nothing to emit: the value was produced directly into the local's slot.
void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
}
351
// Both operands in registers; output reuses the first input's register.
void LocationsBuilderX86_64::VisitEqual(HEqual* equal) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(equal);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  equal->SetLocations(locations);
}
359
// Materializes the equality result (0 or 1) into the output register.
// NOTE(review): cmpq compares the full 64-bit registers even for 32-bit
// inputs -- this relies on producers zero-extending (movl clears the upper
// half); confirm that invariant holds for every value producer.
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* equal) {
  __ cmpq(equal->GetLocations()->InAt(0).AsX86_64().AsCpuRegister(),
          equal->GetLocations()->InAt(1).AsX86_64().AsCpuRegister());
  __ setcc(kEqual, equal->GetLocations()->Out().AsX86_64().AsCpuRegister());
}
365
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}
372
// Materializes the constant into its output location via the generic mover.
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
  codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
}
376
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}
383
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
387
// A void return has no operands.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
391
// Tears down the frame and returns.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
396
// The return value is pinned to RAX per the calling convention.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, X86_64CpuLocation(RAX));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}
415
// Verifies (in debug builds) that the return value is in RAX, then emits the
// frame exit and the return.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
436
// Core registers used to pass arguments to runtime entrypoints; these are the
// first three System V AMD64 integer argument registers.
static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

// Calling convention used for calls into the runtime (quick entrypoints).
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
450
// Returns the location of the next argument of type `type`, advancing the
// visitor's register and stack cursors. 32-bit values take one gp register or
// one stack slot; longs take one gp register but two stack slots (and two gp
// indices when passed on the stack). Floating point is not implemented.
// NOTE(review): `calling_convention` is presumably a member of this visitor --
// not visible in this file chunk; confirm in the header.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
491
// Arguments follow the dex calling convention; RDI is reserved as a temp for
// the method pointer; the return value (if any) lands in RAX.
void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86_64CpuLocation(RDI));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); ++i) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetOut(X86_64CpuLocation(RAX));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
524
// Resolves the callee through the current method's dex cache and calls its
// quick-compiled entry point, recording the PC for stack maps.
void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  codegen_->RecordPcInfo(invoke->GetDexPc());
}
549
// Two register inputs; the output reuses the first input (x86 two-address form).
void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}
573
// Emits addl/addq into the first-input register (also the output register).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  // Location building guaranteed out == in(0).
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }
    case Primitive::kPrimLong: {
      __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
601
// Two register inputs; the output reuses the first input (x86 two-address form).
void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}
625
// Emits subl/subq into the first-input register (also the output register).
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  // Location building guaranteed out == in(0).
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }
    case Primitive::kPrimLong: {
      __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}
653
// The allocation entrypoint returns the new object in RAX.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetOut(X86_64CpuLocation(RAX));
  instruction->SetLocations(locations);
}
659
// Calls the pAllocObjectWithAccessCheck runtime entrypoint (via the GS-based
// thread pointer) with (type_index, current method) as arguments.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(instruction->GetTypeIndex()));

  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  codegen_->RecordPcInfo(instruction->GetDexPc());
}
670
// Parameters live where the calling convention put them; stack-passed ones are
// rebased by the frame size so the offset is relative to our RSP.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}
682
void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}
686
// Input in a register; output reuses it (two-address xor below).
void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}
693
// Boolean not: xor with 1 flips a 0/1 value in place.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
}
700
// Phis place no constraints: register allocation resolves them (codegen of a
// phi itself is unimplemented below).
void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
  instruction->SetLocations(locations);
}
709
// Phis are eliminated before code generation; reaching here is a bug.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}
713
// Parallel moves are inserted after location building, so this is never called.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unimplemented";
}
717
// Delegates to the parallel-move resolver, which orders/breaks move cycles.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
721
// The resolver emits through the owning code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
725
726void ParallelMoveResolverX86_64::EmitMove(size_t index) {
727 MoveOperands* move = moves_.Get(index);
728 Location source = move->GetSource();
729 Location destination = move->GetDestination();
730
731 if (source.IsRegister()) {
732 if (destination.IsRegister()) {
733 __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
734 } else {
735 DCHECK(destination.IsStackSlot());
736 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
737 source.AsX86_64().AsCpuRegister());
738 }
739 } else if (source.IsStackSlot()) {
740 if (destination.IsRegister()) {
741 __ movl(destination.AsX86_64().AsX86_64().AsCpuRegister(),
742 Address(CpuRegister(RSP), source.GetStackIndex()));
743 } else {
744 DCHECK(destination.IsStackSlot());
745 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
746 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
747 }
748 } else {
749 LOG(FATAL) << "Unimplemented";
750 }
751}
752
// Swaps a register with a 32-bit stack slot, using TMP to hold the old slot value.
void ParallelMoveResolverX86_64::Exchange(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), CpuRegister(reg));
  __ movl(CpuRegister(reg), CpuRegister(TMP));
}
758
// Swaps two 32-bit stack slots. Needs a second scratch register besides TMP;
// if one has to be spilled (pushed), the slot offsets shift by a word.
void ParallelMoveResolverX86_64::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Account for the push performed when the scratch register was spilled.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
771
// Emits a swap for a move cycle: xchgq for two registers, otherwise the
// Exchange() helpers for register/slot and slot/slot combinations.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}
789
790
// Saves a scratch register on the stack before the resolver clobbers it.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
794
795
// Restores a scratch register previously saved by SpillScratch().
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
}  // namespace x86_64
}  // namespace art