//===-- SIInstrInfo.cpp - SI Instruction Information ----------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// \brief SI Implementation of TargetInstrInfo.
//
//===----------------------------------------------------------------------===//
14
15
16#include "SIInstrInfo.h"
17#include "AMDGPUTargetMachine.h"
18#include "llvm/CodeGen/MachineInstrBuilder.h"
19#include "llvm/CodeGen/MachineRegisterInfo.h"
20#include "llvm/MC/MCInstrDesc.h"
Tom Stellardf98f2ce2012-12-11 21:25:42 +000021#include <stdio.h>
22
23using namespace llvm;
24
/// Construct the SI instruction info for \p tm.  The register-info member
/// is initialized with a back-reference to this instruction-info object.
SIInstrInfo::SIInstrInfo(AMDGPUTargetMachine &tm)
  : AMDGPUInstrInfo(tm),
    RI(tm, *this)
    { }
29
/// Accessor for the SI register info owned by this instruction-info object.
const SIRegisterInfo &SIInstrInfo::getRegisterInfo() const {
  return RI;
}
33
/// Emit a register-to-register copy from \p SrcReg into \p DestReg at the
/// insertion point \p MI.  Four cases are handled, keyed on the destination
/// register class:
///   - 64-bit VGPR:  split into two 32-bit V_MOV copies via sub0/sub1
///   - 64-bit SGPR:  single S_MOV_B64
///   - 32-bit VGPR:  single V_MOV_B32 (source may be a VGPR or an SGPR)
///   - 32-bit SGPR:  single S_MOV_B32
void
SIInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                         MachineBasicBlock::iterator MI, DebugLoc DL,
                         unsigned DestReg, unsigned SrcReg,
                         bool KillSrc) const {
  // If we are trying to copy to or from SCC, there is a bug somewhere else in
  // the backend. While it may be theoretically possible to do this, it should
  // never be necessary.
  assert(DestReg != AMDGPU::SCC && SrcReg != AMDGPU::SCC);

  if (AMDGPU::VReg_64RegClass.contains(DestReg)) {
    // No 64-bit VALU move exists, so copy each 32-bit half separately.
    assert(AMDGPU::VReg_64RegClass.contains(SrcReg) ||
           AMDGPU::SReg_64RegClass.contains(SrcReg));
    // The implicit def of the full DestReg on the first half marks the pair
    // of moves as together defining the whole 64-bit register.
    BuildMI(MBB, MI, DL, get(AMDGPU::V_MOV_B32_e32), RI.getSubReg(DestReg, AMDGPU::sub0))
            .addReg(RI.getSubReg(SrcReg, AMDGPU::sub0), getKillRegState(KillSrc))
            .addReg(DestReg, RegState::Define | RegState::Implicit);
    BuildMI(MBB, MI, DL, get(AMDGPU::V_MOV_B32_e32), RI.getSubReg(DestReg, AMDGPU::sub1))
            .addReg(RI.getSubReg(SrcReg, AMDGPU::sub1), getKillRegState(KillSrc));
  } else if (AMDGPU::SReg_64RegClass.contains(DestReg)) {
    assert(AMDGPU::SReg_64RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::S_MOV_B64), DestReg)
            .addReg(SrcReg, getKillRegState(KillSrc));
  } else if (AMDGPU::VReg_32RegClass.contains(DestReg)) {
    assert(AMDGPU::VReg_32RegClass.contains(SrcReg) ||
           AMDGPU::SReg_32RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::V_MOV_B32_e32), DestReg)
            .addReg(SrcReg, getKillRegState(KillSrc));
  } else {
    // Remaining possibility: SGPR-to-SGPR 32-bit copy.
    assert(AMDGPU::SReg_32RegClass.contains(DestReg));
    assert(AMDGPU::SReg_32RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::S_MOV_B32), DestReg)
            .addReg(SrcReg, getKillRegState(KillSrc));
  }
}
68
69MachineInstr * SIInstrInfo::getMovImmInstr(MachineFunction *MF, unsigned DstReg,
70 int64_t Imm) const {
71 MachineInstr * MI = MF->CreateMachineInstr(get(AMDGPU::V_MOV_IMM_I32), DebugLoc());
NAKAMURA Takumi6b207d32012-12-20 00:22:11 +000072 MachineInstrBuilder MIB(*MF, MI);
73 MIB.addReg(DstReg, RegState::Define);
74 MIB.addImm(Imm);
Tom Stellardf98f2ce2012-12-11 21:25:42 +000075
76 return MI;
77
78}
79
80bool SIInstrInfo::isMov(unsigned Opcode) const {
81 switch(Opcode) {
82 default: return false;
83 case AMDGPU::S_MOV_B32:
84 case AMDGPU::S_MOV_B64:
85 case AMDGPU::V_MOV_B32_e32:
86 case AMDGPU::V_MOV_B32_e64:
87 case AMDGPU::V_MOV_IMM_F32:
88 case AMDGPU::V_MOV_IMM_I32:
89 case AMDGPU::S_MOV_IMM_I32:
90 return true;
91 }
92}
93
/// Defs of every register class except EXEC may be moved; EXEC controls
/// lane masking, so its defs must stay put.
bool
SIInstrInfo::isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const {
  return RC != &AMDGPU::EXECRegRegClass;
}
Tom Stellardc0b0c672013-02-06 17:32:29 +000098
99//===----------------------------------------------------------------------===//
100// Indirect addressing callbacks
101//===----------------------------------------------------------------------===//
102
/// On SI the register index is used directly as the indirect address;
/// only channel 0 is meaningful (asserted below).
unsigned SIInstrInfo::calculateIndirectAddress(unsigned RegIndex,
                                               unsigned Channel) const {
  assert(Channel == 0);
  return RegIndex;
}
108
109
// TODO(review): indirect addressing is not yet implemented for SI;
// this aborts if reached.
int SIInstrInfo::getIndirectIndexBegin(const MachineFunction &MF) const {
  llvm_unreachable("Unimplemented");
}
113
// TODO(review): indirect addressing is not yet implemented for SI;
// this aborts if reached.
int SIInstrInfo::getIndirectIndexEnd(const MachineFunction &MF) const {
  llvm_unreachable("Unimplemented");
}
117
// TODO(review): indirect addressing is not yet implemented for SI;
// this aborts if reached.
const TargetRegisterClass *SIInstrInfo::getIndirectAddrStoreRegClass(
                                                     unsigned SourceReg) const {
  llvm_unreachable("Unimplemented");
}
122
// TODO(review): indirect addressing is not yet implemented for SI;
// this aborts if reached.
const TargetRegisterClass *SIInstrInfo::getIndirectAddrLoadRegClass() const {
  llvm_unreachable("Unimplemented");
}
126
// TODO(review): indirect writes are not yet implemented for SI;
// this aborts if reached.
MachineInstrBuilder SIInstrInfo::buildIndirectWrite(
                                   MachineBasicBlock *MBB,
                                   MachineBasicBlock::iterator I,
                                   unsigned ValueReg,
                                   unsigned Address, unsigned OffsetReg) const {
  llvm_unreachable("Unimplemented");
}
134
// TODO(review): indirect reads are not yet implemented for SI;
// this aborts if reached.
MachineInstrBuilder SIInstrInfo::buildIndirectRead(
                                   MachineBasicBlock *MBB,
                                   MachineBasicBlock::iterator I,
                                   unsigned ValueReg,
                                   unsigned Address, unsigned OffsetReg) const {
  llvm_unreachable("Unimplemented");
}
142
// TODO(review): indirect addressing is not yet implemented for SI;
// this aborts if reached.
const TargetRegisterClass *SIInstrInfo::getSuperIndirectRegClass() const {
  llvm_unreachable("Unimplemented");
}