//===- AMDGPUBaseInfo.h - Top level definitions for AMDGPU ------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_AMDGPU_UTILS_AMDGPUBASEINFO_H
#define LLVM_LIB_TARGET_AMDGPU_UTILS_AMDGPUBASEINFO_H

#include "AMDGPU.h"
#include "AMDKernelCodeT.h"
#include "SIDefines.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/MC/MCInstrDesc.h"
#include "llvm/Support/AMDHSAKernelDescriptor.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/TargetParser.h"
#include <algorithm>
#include <cstdint>
#include <string>
#include <utility>

namespace llvm {

class Argument;
class AMDGPUSubtarget;
class FeatureBitset;
class Function;
class GCNSubtarget;
class GlobalValue;
class MCContext;
class MCRegisterClass;
class MCRegisterInfo;
class MCSection;
class MCSubtargetInfo;
class MachineMemOperand;
class Triple;

namespace AMDGPU {

#define GET_MIMGBaseOpcode_DECL
#define GET_MIMGDim_DECL
#define GET_MIMGEncoding_DECL
#define GET_MIMGLZMapping_DECL
#include "AMDGPUGenSearchableTables.inc"

namespace IsaInfo {

enum {
  // The closed Vulkan driver sets 96, which limits the wave count to 8 but
  // doesn't spill SGPRs as much as when 80 is set.
  FIXED_NUM_SGPRS_FOR_INIT_BUG = 96,
  TRAP_NUM_SGPRS = 16
};

/// Streams isa version string for given subtarget \p STI into \p Stream.
void streamIsaVersion(const MCSubtargetInfo *STI, raw_ostream &Stream);

/// \returns True if given subtarget \p STI supports code object version 3,
/// false otherwise.
bool hasCodeObjectV3(const MCSubtargetInfo *STI);

/// \returns Wavefront size for given subtarget \p STI.
unsigned getWavefrontSize(const MCSubtargetInfo *STI);

/// \returns Local memory size in bytes for given subtarget \p STI.
unsigned getLocalMemorySize(const MCSubtargetInfo *STI);

/// \returns Number of execution units per compute unit for given subtarget \p
/// STI.
unsigned getEUsPerCU(const MCSubtargetInfo *STI);

/// \returns Maximum number of work groups per compute unit for given subtarget
/// \p STI and limited by given \p FlatWorkGroupSize.
unsigned getMaxWorkGroupsPerCU(const MCSubtargetInfo *STI,
                               unsigned FlatWorkGroupSize);

/// \returns Maximum number of waves per compute unit for given subtarget \p
/// STI without any kind of limitation.
unsigned getMaxWavesPerCU(const MCSubtargetInfo *STI);

/// \returns Maximum number of waves per compute unit for given subtarget \p
/// STI and limited by given \p FlatWorkGroupSize.
unsigned getMaxWavesPerCU(const MCSubtargetInfo *STI,
                          unsigned FlatWorkGroupSize);

/// \returns Minimum number of waves per execution unit for given subtarget \p
/// STI.
unsigned getMinWavesPerEU(const MCSubtargetInfo *STI);

/// \returns Maximum number of waves per execution unit without any kind of
/// limitation.
unsigned getMaxWavesPerEU();

/// \returns Maximum number of waves per execution unit for given subtarget \p
/// STI and limited by given \p FlatWorkGroupSize.
unsigned getMaxWavesPerEU(const MCSubtargetInfo *STI,
                          unsigned FlatWorkGroupSize);

/// \returns Minimum flat work group size for given subtarget \p STI.
unsigned getMinFlatWorkGroupSize(const MCSubtargetInfo *STI);

/// \returns Maximum flat work group size for given subtarget \p STI.
unsigned getMaxFlatWorkGroupSize(const MCSubtargetInfo *STI);

/// \returns Number of waves per work group for given subtarget \p STI and
/// limited by given \p FlatWorkGroupSize.
unsigned getWavesPerWorkGroup(const MCSubtargetInfo *STI,
                              unsigned FlatWorkGroupSize);
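//
// Usage sketch (illustrative only, not part of this interface): given a valid
// subtarget info pointer STI and a kernel's flat work group size, the helpers
// above compose to describe occupancy limits. All variable names here are
// hypothetical.
//
//   unsigned FlatWorkGroupSize = 256; // assumed kernel configuration
//   unsigned WavesPerWG = getWavesPerWorkGroup(STI, FlatWorkGroupSize);
//   unsigned MaxWavesEU = getMaxWavesPerEU(STI, FlatWorkGroupSize);
//   unsigned MinWavesEU = getMinWavesPerEU(STI);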

/// \returns SGPR allocation granularity for given subtarget \p STI.
unsigned getSGPRAllocGranule(const MCSubtargetInfo *STI);

/// \returns SGPR encoding granularity for given subtarget \p STI.
unsigned getSGPREncodingGranule(const MCSubtargetInfo *STI);

/// \returns Total number of SGPRs for given subtarget \p STI.
unsigned getTotalNumSGPRs(const MCSubtargetInfo *STI);

/// \returns Addressable number of SGPRs for given subtarget \p STI.
unsigned getAddressableNumSGPRs(const MCSubtargetInfo *STI);

/// \returns Minimum number of SGPRs that meets the given number of waves per
/// execution unit requirement for given subtarget \p STI.
unsigned getMinNumSGPRs(const MCSubtargetInfo *STI, unsigned WavesPerEU);

/// \returns Maximum number of SGPRs that meets the given number of waves per
/// execution unit requirement for given subtarget \p STI.
unsigned getMaxNumSGPRs(const MCSubtargetInfo *STI, unsigned WavesPerEU,
                        bool Addressable);

/// \returns Number of extra SGPRs implicitly required by given subtarget \p
/// STI when the given special registers are used.
unsigned getNumExtraSGPRs(const MCSubtargetInfo *STI, bool VCCUsed,
                          bool FlatScrUsed, bool XNACKUsed);

/// \returns Number of extra SGPRs implicitly required by given subtarget \p
/// STI when the given special registers are used. XNACK is inferred from
/// \p STI.
unsigned getNumExtraSGPRs(const MCSubtargetInfo *STI, bool VCCUsed,
                          bool FlatScrUsed);

/// \returns Number of SGPR blocks needed for given subtarget \p STI when
/// \p NumSGPRs are used. \p NumSGPRs should already include any special
/// register counts.
unsigned getNumSGPRBlocks(const MCSubtargetInfo *STI, unsigned NumSGPRs);
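//
// Usage sketch (illustrative only): computing the SGPR block count a code
// object would report, assuming NumUsedSGPRs is the number of SGPRs the
// kernel actually uses and STI is a valid subtarget. Names are hypothetical.
//
//   unsigned Extra = getNumExtraSGPRs(STI, /*VCCUsed=*/true,
//                                     /*FlatScrUsed=*/false);
//   unsigned Blocks = getNumSGPRBlocks(STI, NumUsedSGPRs + Extra);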

/// \returns VGPR allocation granularity for given subtarget \p STI.
unsigned getVGPRAllocGranule(const MCSubtargetInfo *STI);

/// \returns VGPR encoding granularity for given subtarget \p STI.
unsigned getVGPREncodingGranule(const MCSubtargetInfo *STI);

/// \returns Total number of VGPRs for given subtarget \p STI.
unsigned getTotalNumVGPRs(const MCSubtargetInfo *STI);

/// \returns Addressable number of VGPRs for given subtarget \p STI.
unsigned getAddressableNumVGPRs(const MCSubtargetInfo *STI);

/// \returns Minimum number of VGPRs that meets given number of waves per
/// execution unit requirement for given subtarget \p STI.
unsigned getMinNumVGPRs(const MCSubtargetInfo *STI, unsigned WavesPerEU);

/// \returns Maximum number of VGPRs that meets given number of waves per
/// execution unit requirement for given subtarget \p STI.
unsigned getMaxNumVGPRs(const MCSubtargetInfo *STI, unsigned WavesPerEU);

/// \returns Number of VGPR blocks needed for given subtarget \p STI when
/// \p NumVGPRs are used.
unsigned getNumVGPRBlocks(const MCSubtargetInfo *STI, unsigned NumVGPRs);

} // end namespace IsaInfo

LLVM_READONLY
int16_t getNamedOperandIdx(uint16_t Opcode, uint16_t NamedIdx);

struct MIMGBaseOpcodeInfo {
  MIMGBaseOpcode BaseOpcode;
  bool Store;
  bool Atomic;
  bool AtomicX2;
  bool Sampler;

  uint8_t NumExtraArgs;
  bool Gradients;
  bool Coordinates;
  bool LodOrClampOrMip;
  bool HasD16;
};

LLVM_READONLY
const MIMGBaseOpcodeInfo *getMIMGBaseOpcodeInfo(unsigned BaseOpcode);

struct MIMGDimInfo {
  MIMGDim Dim;
  uint8_t NumCoords;
  uint8_t NumGradients;
  bool DA;
};

LLVM_READONLY
const MIMGDimInfo *getMIMGDimInfo(unsigned Dim);

struct MIMGLZMappingInfo {
  MIMGBaseOpcode L;
  MIMGBaseOpcode LZ;
};

LLVM_READONLY
const MIMGLZMappingInfo *getMIMGLZMappingInfo(unsigned L);

LLVM_READONLY
int getMIMGOpcode(unsigned BaseOpcode, unsigned MIMGEncoding,
                  unsigned VDataDwords, unsigned VAddrDwords);
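//
// Usage sketch (illustrative only): looking up MIMG information from the
// searchable tables declared above. BaseOpcode, MIMGEncoding, VDataDwords and
// VAddrDwords are placeholders; real callers take them from an existing
// instruction or from the generated enums, and the sketch assumes a negative
// result from getMIMGOpcode() means no matching variant exists.
//
//   const MIMGBaseOpcodeInfo *Info = getMIMGBaseOpcodeInfo(BaseOpcode);
//   if (Info && Info->Sampler) {
//     int NewOpc = getMIMGOpcode(BaseOpcode, MIMGEncoding, VDataDwords,
//                                VAddrDwords);
//   }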

LLVM_READONLY
int getMaskedMIMGOp(unsigned Opc, unsigned NewChannels);

LLVM_READONLY
int getMCOpcode(uint16_t Opcode, unsigned Gen);

void initDefaultAMDKernelCodeT(amd_kernel_code_t &Header,
                               const MCSubtargetInfo *STI);

amdhsa::kernel_descriptor_t getDefaultAmdhsaKernelDescriptor();

bool isGroupSegment(const GlobalValue *GV);
bool isGlobalSegment(const GlobalValue *GV);
bool isReadOnlySegment(const GlobalValue *GV);

/// \returns True if constants should be emitted to .text section for given
/// target triple \p TT, false otherwise.
bool shouldEmitConstantsToTextSection(const Triple &TT);

/// \returns Integer value requested using \p F's \p Name attribute.
///
/// \returns \p Default if attribute is not present.
///
/// \returns \p Default and emits error if requested value cannot be converted
/// to integer.
int getIntegerAttribute(const Function &F, StringRef Name, int Default);

/// \returns A pair of integer values requested using \p F's \p Name attribute
/// in "first[,second]" format ("second" is only optional if
/// \p OnlyFirstRequired is true).
///
/// \returns \p Default if attribute is not present.
///
/// \returns \p Default and emits error if one of the requested values cannot be
/// converted to integer, or if \p OnlyFirstRequired is false and the "second"
/// value is not present.
std::pair<int, int> getIntegerPairAttribute(const Function &F,
                                            StringRef Name,
                                            std::pair<int, int> Default,
                                            bool OnlyFirstRequired = false);
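//
// Usage sketch (illustrative only): reading a "min,max" style function
// attribute in the "first[,second]" format described above. The attribute
// name and default pair below are examples, not requirements of this API.
//
//   // IR: attributes #0 = { "amdgpu-flat-work-group-size"="128,256" }
//   std::pair<int, int> Range =
//       getIntegerPairAttribute(F, "amdgpu-flat-work-group-size",
//                               std::make_pair(1, 256));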

/// Represents the counter values to wait for in an s_waitcnt instruction.
///
/// Large values (including the maximum possible integer) can be used to
/// represent "don't care" waits.
struct Waitcnt {
  unsigned VmCnt = ~0u;
  unsigned ExpCnt = ~0u;
  unsigned LgkmCnt = ~0u;

  Waitcnt() {}
  Waitcnt(unsigned VmCnt, unsigned ExpCnt, unsigned LgkmCnt)
      : VmCnt(VmCnt), ExpCnt(ExpCnt), LgkmCnt(LgkmCnt) {}

  static Waitcnt allZero() { return Waitcnt(0, 0, 0); }

  bool dominates(const Waitcnt &Other) const {
    return VmCnt <= Other.VmCnt && ExpCnt <= Other.ExpCnt &&
           LgkmCnt <= Other.LgkmCnt;
  }

  Waitcnt combined(const Waitcnt &Other) const {
    return Waitcnt(std::min(VmCnt, Other.VmCnt), std::min(ExpCnt, Other.ExpCnt),
                   std::min(LgkmCnt, Other.LgkmCnt));
  }
};
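//
// Semantics sketch (illustrative only): a smaller counter value is a stricter
// wait, so combined() takes the per-counter minimum and dominates() checks
// that one wait is at least as strict as another in every counter.
//
//   Waitcnt A(/*VmCnt=*/0, /*ExpCnt=*/~0u, /*LgkmCnt=*/~0u); // vmcnt only
//   Waitcnt B(/*VmCnt=*/~0u, /*ExpCnt=*/0, /*LgkmCnt=*/~0u); // expcnt only
//   Waitcnt C = A.combined(B);     // waits for both: VmCnt == 0, ExpCnt == 0
//   bool Stricter = C.dominates(A); // true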

/// \returns Vmcnt bit mask for given isa \p Version.
unsigned getVmcntBitMask(const IsaVersion &Version);

/// \returns Expcnt bit mask for given isa \p Version.
unsigned getExpcntBitMask(const IsaVersion &Version);

/// \returns Lgkmcnt bit mask for given isa \p Version.
unsigned getLgkmcntBitMask(const IsaVersion &Version);

/// \returns Waitcnt bit mask for given isa \p Version.
unsigned getWaitcntBitMask(const IsaVersion &Version);

/// \returns Decoded Vmcnt from given \p Waitcnt for given isa \p Version.
unsigned decodeVmcnt(const IsaVersion &Version, unsigned Waitcnt);

/// \returns Decoded Expcnt from given \p Waitcnt for given isa \p Version.
unsigned decodeExpcnt(const IsaVersion &Version, unsigned Waitcnt);

/// \returns Decoded Lgkmcnt from given \p Waitcnt for given isa \p Version.
unsigned decodeLgkmcnt(const IsaVersion &Version, unsigned Waitcnt);

/// Decodes Vmcnt, Expcnt and Lgkmcnt from given \p Waitcnt for given isa
/// \p Version, and writes decoded values into \p Vmcnt, \p Expcnt and
/// \p Lgkmcnt respectively.
///
/// \details \p Vmcnt, \p Expcnt and \p Lgkmcnt are decoded as follows:
///     \p Vmcnt = \p Waitcnt[3:0]                      (pre-gfx9 only)
///     \p Vmcnt = \p Waitcnt[3:0] | \p Waitcnt[15:14]  (gfx9+ only)
///     \p Expcnt = \p Waitcnt[6:4]
///     \p Lgkmcnt = \p Waitcnt[11:8]
void decodeWaitcnt(const IsaVersion &Version, unsigned Waitcnt,
                   unsigned &Vmcnt, unsigned &Expcnt, unsigned &Lgkmcnt);

Waitcnt decodeWaitcnt(const IsaVersion &Version, unsigned Encoded);

/// \returns \p Waitcnt with encoded \p Vmcnt for given isa \p Version.
unsigned encodeVmcnt(const IsaVersion &Version, unsigned Waitcnt,
                     unsigned Vmcnt);

/// \returns \p Waitcnt with encoded \p Expcnt for given isa \p Version.
unsigned encodeExpcnt(const IsaVersion &Version, unsigned Waitcnt,
                      unsigned Expcnt);

/// \returns \p Waitcnt with encoded \p Lgkmcnt for given isa \p Version.
unsigned encodeLgkmcnt(const IsaVersion &Version, unsigned Waitcnt,
                       unsigned Lgkmcnt);

/// Encodes \p Vmcnt, \p Expcnt and \p Lgkmcnt into Waitcnt for given isa
/// \p Version.
///
/// \details \p Vmcnt, \p Expcnt and \p Lgkmcnt are encoded as follows:
///     Waitcnt[3:0]   = \p Vmcnt       (pre-gfx9 only)
///     Waitcnt[3:0]   = \p Vmcnt[3:0]  (gfx9+ only)
///     Waitcnt[6:4]   = \p Expcnt
///     Waitcnt[11:8]  = \p Lgkmcnt
///     Waitcnt[15:14] = \p Vmcnt[5:4]  (gfx9+ only)
///
/// \returns Waitcnt with encoded \p Vmcnt, \p Expcnt and \p Lgkmcnt for given
/// isa \p Version.
unsigned encodeWaitcnt(const IsaVersion &Version,
                       unsigned Vmcnt, unsigned Expcnt, unsigned Lgkmcnt);

unsigned encodeWaitcnt(const IsaVersion &Version, const Waitcnt &Decoded);
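//
// Worked example (illustrative only): following the pre-gfx9 layout above,
// vmcnt occupies bits [3:0], expcnt bits [6:4] and lgkmcnt bits [11:8], so the
// field bits for vmcnt=1, expcnt=2, lgkmcnt=3 are 0x1 | 0x20 | 0x300 = 0x321,
// and decoding the encoded value returns the same triple. Version is assumed
// to be an IsaVersion for the target in question.
//
//   unsigned Enc = encodeWaitcnt(Version, /*Vmcnt=*/1, /*Expcnt=*/2,
//                                /*Lgkmcnt=*/3);
//   Waitcnt Dec = decodeWaitcnt(Version, Enc); // VmCnt==1, ExpCnt==2, LgkmCnt==3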

unsigned getInitialPSInputAddr(const Function &F);

LLVM_READNONE
bool isShader(CallingConv::ID CC);

LLVM_READNONE
bool isCompute(CallingConv::ID CC);

LLVM_READNONE
bool isEntryFunctionCC(CallingConv::ID CC);

// FIXME: Remove this when calling conventions are cleaned up.
LLVM_READNONE
inline bool isKernel(CallingConv::ID CC) {
  switch (CC) {
  case CallingConv::AMDGPU_KERNEL:
  case CallingConv::SPIR_KERNEL:
    return true;
  default:
    return false;
  }
}

bool hasXNACK(const MCSubtargetInfo &STI);
bool hasSRAMECC(const MCSubtargetInfo &STI);
bool hasMIMG_R128(const MCSubtargetInfo &STI);
bool hasPackedD16(const MCSubtargetInfo &STI);

bool isSI(const MCSubtargetInfo &STI);
bool isCI(const MCSubtargetInfo &STI);
bool isVI(const MCSubtargetInfo &STI);
bool isGFX9(const MCSubtargetInfo &STI);

/// \returns true if \p Reg is an SGPR (scalar register).
bool isSGPR(unsigned Reg, const MCRegisterInfo* TRI);

/// \returns true if there is any intersection between registers \p Reg0 and
/// \p Reg1.
bool isRegIntersect(unsigned Reg0, unsigned Reg1, const MCRegisterInfo* TRI);

/// If \p Reg is a pseudo reg, return the correct hardware register given
/// \p STI otherwise return \p Reg.
unsigned getMCReg(unsigned Reg, const MCSubtargetInfo &STI);

/// Convert hardware register \p Reg to a pseudo register.
LLVM_READNONE
unsigned mc2PseudoReg(unsigned Reg);

/// Can this operand also contain immediate values?
bool isSISrcOperand(const MCInstrDesc &Desc, unsigned OpNo);

/// Is this a floating-point operand?
bool isSISrcFPOperand(const MCInstrDesc &Desc, unsigned OpNo);

/// Does this operand support only inlinable literals?
bool isSISrcInlinableOperand(const MCInstrDesc &Desc, unsigned OpNo);

/// Get the size in bits of the register class with ID \p RCID.
unsigned getRegBitWidth(unsigned RCID);

/// Get the size in bits of a register from the register class \p RC.
unsigned getRegBitWidth(const MCRegisterClass &RC);

/// Get the size of a register operand.
unsigned getRegOperandSize(const MCRegisterInfo *MRI, const MCInstrDesc &Desc,
                           unsigned OpNo);

LLVM_READNONE
inline unsigned getOperandSize(const MCOperandInfo &OpInfo) {
  switch (OpInfo.OperandType) {
  case AMDGPU::OPERAND_REG_IMM_INT32:
  case AMDGPU::OPERAND_REG_IMM_FP32:
  case AMDGPU::OPERAND_REG_INLINE_C_INT32:
  case AMDGPU::OPERAND_REG_INLINE_C_FP32:
    return 4;

  case AMDGPU::OPERAND_REG_IMM_INT64:
  case AMDGPU::OPERAND_REG_IMM_FP64:
  case AMDGPU::OPERAND_REG_INLINE_C_INT64:
  case AMDGPU::OPERAND_REG_INLINE_C_FP64:
    return 8;

  case AMDGPU::OPERAND_REG_IMM_INT16:
  case AMDGPU::OPERAND_REG_IMM_FP16:
  case AMDGPU::OPERAND_REG_INLINE_C_INT16:
  case AMDGPU::OPERAND_REG_INLINE_C_FP16:
  case AMDGPU::OPERAND_REG_INLINE_C_V2INT16:
  case AMDGPU::OPERAND_REG_INLINE_C_V2FP16:
    return 2;

  default:
    llvm_unreachable("unhandled operand type");
  }
}

LLVM_READNONE
inline unsigned getOperandSize(const MCInstrDesc &Desc, unsigned OpNo) {
  return getOperandSize(Desc.OpInfo[OpNo]);
}

/// Is this literal inlinable?
LLVM_READNONE
bool isInlinableLiteral64(int64_t Literal, bool HasInv2Pi);

LLVM_READNONE
bool isInlinableLiteral32(int32_t Literal, bool HasInv2Pi);

LLVM_READNONE
bool isInlinableLiteral16(int16_t Literal, bool HasInv2Pi);

LLVM_READNONE
bool isInlinableLiteralV216(int32_t Literal, bool HasInv2Pi);
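//
// Usage sketch (illustrative only): checking whether a literal can be folded
// into an instruction as an inline constant instead of requiring an extra
// literal dword. The bit pattern below is IEEE-754 for 1.0f, one of the
// values the hardware treats as inlinable; HasInv2Pi is assumed to come from
// the subtarget.
//
//   int32_t OneF32 = 0x3f800000;                                  // 1.0f
//   bool Inline = isInlinableLiteral32(OneF32, /*HasInv2Pi=*/true); // true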

bool isArgPassedInSGPR(const Argument *Arg);

/// \returns The encoding that will be used for \p ByteOffset in the SMRD
/// offset field.
int64_t getSMRDEncodedOffset(const MCSubtargetInfo &ST, int64_t ByteOffset);

/// \returns true if this offset is small enough to fit in the SMRD
/// offset field. \p ByteOffset should be the offset in bytes and
/// not the encoded offset.
bool isLegalSMRDImmOffset(const MCSubtargetInfo &ST, int64_t ByteOffset);

// Given Imm, split it into the values to put into the SOffset and ImmOffset
// fields in an MUBUF instruction. Return false if it is not possible (due to a
// hardware bug needing a workaround).
bool splitMUBUFOffset(uint32_t Imm, uint32_t &SOffset, uint32_t &ImmOffset,
                      const GCNSubtarget *Subtarget);
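//
// Usage sketch (illustrative only): splitting a large constant buffer offset
// into the SOffset/ImmOffset pair described above. The offset value is a
// placeholder and Subtarget is assumed to be a valid GCNSubtarget pointer.
//
//   uint32_t SOffset, ImmOffset;
//   if (splitMUBUFOffset(/*Imm=*/8192, SOffset, ImmOffset, Subtarget)) {
//     // SOffset goes in the instruction's soffset operand and ImmOffset in
//     // its immediate offset field; together they add up to the original Imm.
//   }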

/// \returns true if the intrinsic is divergent
bool isIntrinsicSourceOfDivergence(unsigned IntrID);

} // end namespace AMDGPU
} // end namespace llvm

#endif // LLVM_LIB_TARGET_AMDGPU_UTILS_AMDGPUBASEINFO_H