//===-- SIInstrInfo.cpp - SI Instruction Information ---------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// \brief SI Implementation of TargetInstrInfo.
//
//===----------------------------------------------------------------------===//


#include "SIInstrInfo.h"
#include "AMDGPUTargetMachine.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/MC/MCInstrDesc.h"
#include <stdio.h>

using namespace llvm;

SIInstrInfo::SIInstrInfo(AMDGPUTargetMachine &tm)
  : AMDGPUInstrInfo(tm),
    RI(tm, *this)
    { }

const SIRegisterInfo &SIInstrInfo::getRegisterInfo() const {
  return RI;
}

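// Copy between two physical registers, choosing the mov opcode from the
// destination register class: V_MOV_B32_e32 for VGPRs (with 64-bit VGPR
// copies split into two 32-bit moves) and S_MOV_B32/S_MOV_B64 for SGPRs.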
void
SIInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                         MachineBasicBlock::iterator MI, DebugLoc DL,
                         unsigned DestReg, unsigned SrcReg,
                         bool KillSrc) const {
  // If we are trying to copy to or from SCC, there is a bug somewhere else in
  // the backend. While it may be theoretically possible to do this, it should
  // never be necessary.
  assert(DestReg != AMDGPU::SCC && SrcReg != AMDGPU::SCC);

  if (AMDGPU::VReg_64RegClass.contains(DestReg)) {
    assert(AMDGPU::VReg_64RegClass.contains(SrcReg) ||
           AMDGPU::SReg_64RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::V_MOV_B32_e32), RI.getSubReg(DestReg, AMDGPU::sub0))
            .addReg(RI.getSubReg(SrcReg, AMDGPU::sub0), getKillRegState(KillSrc))
            .addReg(DestReg, RegState::Define | RegState::Implicit);
    BuildMI(MBB, MI, DL, get(AMDGPU::V_MOV_B32_e32), RI.getSubReg(DestReg, AMDGPU::sub1))
            .addReg(RI.getSubReg(SrcReg, AMDGPU::sub1), getKillRegState(KillSrc));
  } else if (AMDGPU::SReg_64RegClass.contains(DestReg)) {
    assert(AMDGPU::SReg_64RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::S_MOV_B64), DestReg)
            .addReg(SrcReg, getKillRegState(KillSrc));
  } else if (AMDGPU::VReg_32RegClass.contains(DestReg)) {
    assert(AMDGPU::VReg_32RegClass.contains(SrcReg) ||
           AMDGPU::SReg_32RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::V_MOV_B32_e32), DestReg)
            .addReg(SrcReg, getKillRegState(KillSrc));
  } else {
    assert(AMDGPU::SReg_32RegClass.contains(DestReg));
    assert(AMDGPU::SReg_32RegClass.contains(SrcReg));
    BuildMI(MBB, MI, DL, get(AMDGPU::S_MOV_B32), DestReg)
            .addReg(SrcReg, getKillRegState(KillSrc));
  }
}

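// Only commute instructions whose first two source operands are both
// registers; anything else returns 0 so the instruction is left untouched.
// The actual operand swap is delegated to the generic implementation.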
MachineInstr *SIInstrInfo::commuteInstruction(MachineInstr *MI,
                                              bool NewMI) const {

  if (MI->getNumOperands() < 3 || !MI->getOperand(1).isReg() ||
      !MI->getOperand(2).isReg())
    return 0;

  return TargetInstrInfo::commuteInstruction(MI, NewMI);
}

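// Build a stand-alone V_MOV_B32_e32 that materializes the immediate Imm into
// DstReg. The instruction is not inserted into any basic block; the caller is
// expected to place it.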
MachineInstr * SIInstrInfo::getMovImmInstr(MachineFunction *MF, unsigned DstReg,
                                           int64_t Imm) const {
  MachineInstr * MI = MF->CreateMachineInstr(get(AMDGPU::V_MOV_B32_e32), DebugLoc());
  MachineInstrBuilder MIB(*MF, MI);
  MIB.addReg(DstReg, RegState::Define);
  MIB.addImm(Imm);

  return MI;
}

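// Treat both the scalar moves (S_MOV_B32/B64) and the vector move encodings
// (V_MOV_B32 e32/e64) as simple moves.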
bool SIInstrInfo::isMov(unsigned Opcode) const {
  switch (Opcode) {
  default: return false;
  case AMDGPU::S_MOV_B32:
  case AMDGPU::S_MOV_B64:
  case AMDGPU::V_MOV_B32_e32:
  case AMDGPU::V_MOV_B32_e64:
    return true;
  }
}

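// Defs of EXEC control which lanes execute, so they are never safe to move;
// all other register classes are fine.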
bool
SIInstrInfo::isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const {
  return RC != &AMDGPU::EXECRegRegClass;
}

//===----------------------------------------------------------------------===//
// Indirect addressing callbacks
//===----------------------------------------------------------------------===//

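// SI has a single channel, so the indirect address is just the register
// index.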
unsigned SIInstrInfo::calculateIndirectAddress(unsigned RegIndex,
                                               unsigned Channel) const {
  assert(Channel == 0);
  return RegIndex;
}

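// The remaining indirect addressing hooks are not implemented for SI yet;
// hitting any of them is a hard error.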
int SIInstrInfo::getIndirectIndexBegin(const MachineFunction &MF) const {
  llvm_unreachable("Unimplemented");
}

int SIInstrInfo::getIndirectIndexEnd(const MachineFunction &MF) const {
  llvm_unreachable("Unimplemented");
}

const TargetRegisterClass *SIInstrInfo::getIndirectAddrStoreRegClass(
                                                     unsigned SourceReg) const {
  llvm_unreachable("Unimplemented");
}

const TargetRegisterClass *SIInstrInfo::getIndirectAddrLoadRegClass() const {
  llvm_unreachable("Unimplemented");
}

MachineInstrBuilder SIInstrInfo::buildIndirectWrite(
                                   MachineBasicBlock *MBB,
                                   MachineBasicBlock::iterator I,
                                   unsigned ValueReg,
                                   unsigned Address, unsigned OffsetReg) const {
  llvm_unreachable("Unimplemented");
}

MachineInstrBuilder SIInstrInfo::buildIndirectRead(
                                   MachineBasicBlock *MBB,
                                   MachineBasicBlock::iterator I,
                                   unsigned ValueReg,
                                   unsigned Address, unsigned OffsetReg) const {
  llvm_unreachable("Unimplemented");
}

const TargetRegisterClass *SIInstrInfo::getSuperIndirectRegClass() const {
  llvm_unreachable("Unimplemented");
}