//===-- SIInstructions.td - SI Instruction Definitions --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// This file was originally auto-generated from a GPU register header file and
// all the instruction definitions were originally commented out. Instructions
// that are not yet supported remain commented out.
//===----------------------------------------------------------------------===//

def isSI : Predicate<"Subtarget.device()"
                     "->getGeneration() == AMDGPUDeviceInfo::HD7XXX">;

let Predicates = [isSI] in {

let neverHasSideEffects = 1 in {
def S_MOV_B32 : SOP1_32 <0x00000003, "S_MOV_B32", []>;
def S_MOV_B64 : SOP1_64 <0x00000004, "S_MOV_B64", []>;
def S_CMOV_B32 : SOP1_32 <0x00000005, "S_CMOV_B32", []>;
def S_CMOV_B64 : SOP1_64 <0x00000006, "S_CMOV_B64", []>;
def S_NOT_B32 : SOP1_32 <0x00000007, "S_NOT_B32", []>;
def S_NOT_B64 : SOP1_64 <0x00000008, "S_NOT_B64", []>;
def S_WQM_B32 : SOP1_32 <0x00000009, "S_WQM_B32", []>;
def S_WQM_B64 : SOP1_64 <0x0000000a, "S_WQM_B64", []>;
def S_BREV_B32 : SOP1_32 <0x0000000b, "S_BREV_B32", []>;
def S_BREV_B64 : SOP1_64 <0x0000000c, "S_BREV_B64", []>;
} // End neverHasSideEffects = 1
////def S_BCNT0_I32_B32 : SOP1_BCNT0 <0x0000000d, "S_BCNT0_I32_B32", []>;
////def S_BCNT0_I32_B64 : SOP1_BCNT0 <0x0000000e, "S_BCNT0_I32_B64", []>;
////def S_BCNT1_I32_B32 : SOP1_BCNT1 <0x0000000f, "S_BCNT1_I32_B32", []>;
////def S_BCNT1_I32_B64 : SOP1_BCNT1 <0x00000010, "S_BCNT1_I32_B64", []>;
////def S_FF0_I32_B32 : SOP1_FF0 <0x00000011, "S_FF0_I32_B32", []>;
////def S_FF0_I32_B64 : SOP1_FF0 <0x00000012, "S_FF0_I32_B64", []>;
////def S_FF1_I32_B32 : SOP1_FF1 <0x00000013, "S_FF1_I32_B32", []>;
////def S_FF1_I32_B64 : SOP1_FF1 <0x00000014, "S_FF1_I32_B64", []>;
//def S_FLBIT_I32_B32 : SOP1_32 <0x00000015, "S_FLBIT_I32_B32", []>;
//def S_FLBIT_I32_B64 : SOP1_32 <0x00000016, "S_FLBIT_I32_B64", []>;
def S_FLBIT_I32 : SOP1_32 <0x00000017, "S_FLBIT_I32", []>;
//def S_FLBIT_I32_I64 : SOP1_32 <0x00000018, "S_FLBIT_I32_I64", []>;
//def S_SEXT_I32_I8 : SOP1_32 <0x00000019, "S_SEXT_I32_I8", []>;
//def S_SEXT_I32_I16 : SOP1_32 <0x0000001a, "S_SEXT_I32_I16", []>;
////def S_BITSET0_B32 : SOP1_BITSET0 <0x0000001b, "S_BITSET0_B32", []>;
////def S_BITSET0_B64 : SOP1_BITSET0 <0x0000001c, "S_BITSET0_B64", []>;
////def S_BITSET1_B32 : SOP1_BITSET1 <0x0000001d, "S_BITSET1_B32", []>;
////def S_BITSET1_B64 : SOP1_BITSET1 <0x0000001e, "S_BITSET1_B64", []>;
def S_GETPC_B64 : SOP1_64 <0x0000001f, "S_GETPC_B64", []>;
def S_SETPC_B64 : SOP1_64 <0x00000020, "S_SETPC_B64", []>;
def S_SWAPPC_B64 : SOP1_64 <0x00000021, "S_SWAPPC_B64", []>;
def S_RFE_B64 : SOP1_64 <0x00000022, "S_RFE_B64", []>;

let hasSideEffects = 1, Uses = [EXEC], Defs = [EXEC] in {

def S_AND_SAVEEXEC_B64 : SOP1_64 <0x00000024, "S_AND_SAVEEXEC_B64", []>;
def S_OR_SAVEEXEC_B64 : SOP1_64 <0x00000025, "S_OR_SAVEEXEC_B64", []>;
def S_XOR_SAVEEXEC_B64 : SOP1_64 <0x00000026, "S_XOR_SAVEEXEC_B64", []>;
def S_ANDN2_SAVEEXEC_B64 : SOP1_64 <0x00000027, "S_ANDN2_SAVEEXEC_B64", []>;
def S_ORN2_SAVEEXEC_B64 : SOP1_64 <0x00000028, "S_ORN2_SAVEEXEC_B64", []>;
def S_NAND_SAVEEXEC_B64 : SOP1_64 <0x00000029, "S_NAND_SAVEEXEC_B64", []>;
def S_NOR_SAVEEXEC_B64 : SOP1_64 <0x0000002a, "S_NOR_SAVEEXEC_B64", []>;
def S_XNOR_SAVEEXEC_B64 : SOP1_64 <0x0000002b, "S_XNOR_SAVEEXEC_B64", []>;

} // End hasSideEffects = 1

def S_QUADMASK_B32 : SOP1_32 <0x0000002c, "S_QUADMASK_B32", []>;
def S_QUADMASK_B64 : SOP1_64 <0x0000002d, "S_QUADMASK_B64", []>;
def S_MOVRELS_B32 : SOP1_32 <0x0000002e, "S_MOVRELS_B32", []>;
def S_MOVRELS_B64 : SOP1_64 <0x0000002f, "S_MOVRELS_B64", []>;
def S_MOVRELD_B32 : SOP1_32 <0x00000030, "S_MOVRELD_B32", []>;
def S_MOVRELD_B64 : SOP1_64 <0x00000031, "S_MOVRELD_B64", []>;
//def S_CBRANCH_JOIN : SOP1_ <0x00000032, "S_CBRANCH_JOIN", []>;
def S_MOV_REGRD_B32 : SOP1_32 <0x00000033, "S_MOV_REGRD_B32", []>;
def S_ABS_I32 : SOP1_32 <0x00000034, "S_ABS_I32", []>;
def S_MOV_FED_B32 : SOP1_32 <0x00000035, "S_MOV_FED_B32", []>;
def S_MOVK_I32 : SOPK_32 <0x00000000, "S_MOVK_I32", []>;
def S_CMOVK_I32 : SOPK_32 <0x00000002, "S_CMOVK_I32", []>;

/*
This instruction is disabled for now until we can figure out how to teach
the instruction selector to correctly choose between the S_CMP* and V_CMP*
instructions.

When this instruction is enabled, the code generator sometimes produces this
invalid sequence:

SCC = S_CMPK_EQ_I32 SGPR0, imm
VCC = COPY SCC
VGPR0 = V_CNDMASK VCC, VGPR0, VGPR1

def S_CMPK_EQ_I32 : SOPK <
  0x00000003, (outs SCCReg:$dst), (ins SReg_32:$src0, i32imm:$src1),
  "S_CMPK_EQ_I32",
  [(set SCCReg:$dst, (setcc SReg_32:$src0, imm:$src1, SETEQ))]
>;
*/
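
// Until the selector can make that choice, integer equality compares are
// matched to the VALU compare instead. A minimal sketch of that selection,
// mirroring the V_CMP_EQ_I32 pattern defined later in this file:
//
//   def : Pat <
//     (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_EQ)),
//     (V_CMP_EQ_I32_e64 AllReg_32:$src0, VReg_32:$src1)
//   >;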

def S_CMPK_LG_I32 : SOPK_32 <0x00000004, "S_CMPK_LG_I32", []>;
def S_CMPK_GT_I32 : SOPK_32 <0x00000005, "S_CMPK_GT_I32", []>;
def S_CMPK_GE_I32 : SOPK_32 <0x00000006, "S_CMPK_GE_I32", []>;
def S_CMPK_LT_I32 : SOPK_32 <0x00000007, "S_CMPK_LT_I32", []>;
def S_CMPK_LE_I32 : SOPK_32 <0x00000008, "S_CMPK_LE_I32", []>;
def S_CMPK_EQ_U32 : SOPK_32 <0x00000009, "S_CMPK_EQ_U32", []>;
def S_CMPK_LG_U32 : SOPK_32 <0x0000000a, "S_CMPK_LG_U32", []>;
def S_CMPK_GT_U32 : SOPK_32 <0x0000000b, "S_CMPK_GT_U32", []>;
def S_CMPK_GE_U32 : SOPK_32 <0x0000000c, "S_CMPK_GE_U32", []>;
def S_CMPK_LT_U32 : SOPK_32 <0x0000000d, "S_CMPK_LT_U32", []>;
def S_CMPK_LE_U32 : SOPK_32 <0x0000000e, "S_CMPK_LE_U32", []>;
def S_ADDK_I32 : SOPK_32 <0x0000000f, "S_ADDK_I32", []>;
def S_MULK_I32 : SOPK_32 <0x00000010, "S_MULK_I32", []>;
//def S_CBRANCH_I_FORK : SOPK_ <0x00000011, "S_CBRANCH_I_FORK", []>;
def S_GETREG_B32 : SOPK_32 <0x00000012, "S_GETREG_B32", []>;
def S_SETREG_B32 : SOPK_32 <0x00000013, "S_SETREG_B32", []>;
def S_GETREG_REGRD_B32 : SOPK_32 <0x00000014, "S_GETREG_REGRD_B32", []>;
//def S_SETREG_IMM32_B32 : SOPK_32 <0x00000015, "S_SETREG_IMM32_B32", []>;
//def EXP : EXP_ <0x00000000, "EXP", []>;

defm V_CMP_F_F32 : VOPC_32 <0x00000000, "V_CMP_F_F32", []>;
defm V_CMP_LT_F32 : VOPC_32 <0x00000001, "V_CMP_LT_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_LT)),
  (V_CMP_LT_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_EQ_F32 : VOPC_32 <0x00000002, "V_CMP_EQ_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_EQ)),
  (V_CMP_EQ_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_LE_F32 : VOPC_32 <0x00000003, "V_CMP_LE_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_LE)),
  (V_CMP_LE_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_GT_F32 : VOPC_32 <0x00000004, "V_CMP_GT_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_GT)),
  (V_CMP_GT_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_LG_F32 : VOPC_32 <0x00000005, "V_CMP_LG_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_NE)),
  (V_CMP_LG_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_GE_F32 : VOPC_32 <0x00000006, "V_CMP_GE_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_GE)),
  (V_CMP_GE_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_O_F32 : VOPC_32 <0x00000007, "V_CMP_O_F32", []>;
defm V_CMP_U_F32 : VOPC_32 <0x00000008, "V_CMP_U_F32", []>;
defm V_CMP_NGE_F32 : VOPC_32 <0x00000009, "V_CMP_NGE_F32", []>;
defm V_CMP_NLG_F32 : VOPC_32 <0x0000000a, "V_CMP_NLG_F32", []>;
defm V_CMP_NGT_F32 : VOPC_32 <0x0000000b, "V_CMP_NGT_F32", []>;
defm V_CMP_NLE_F32 : VOPC_32 <0x0000000c, "V_CMP_NLE_F32", []>;
defm V_CMP_NEQ_F32 : VOPC_32 <0x0000000d, "V_CMP_NEQ_F32", []>;
def : Pat <
  (i1 (setcc (f32 AllReg_32:$src0), VReg_32:$src1, COND_NE)),
  (V_CMP_NEQ_F32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_NLT_F32 : VOPC_32 <0x0000000e, "V_CMP_NLT_F32", []>;
defm V_CMP_TRU_F32 : VOPC_32 <0x0000000f, "V_CMP_TRU_F32", []>;

// Side effect is writing to EXEC
let hasSideEffects = 1 in {

defm V_CMPX_F_F32 : VOPC_32 <0x00000010, "V_CMPX_F_F32", []>;
defm V_CMPX_LT_F32 : VOPC_32 <0x00000011, "V_CMPX_LT_F32", []>;
defm V_CMPX_EQ_F32 : VOPC_32 <0x00000012, "V_CMPX_EQ_F32", []>;
defm V_CMPX_LE_F32 : VOPC_32 <0x00000013, "V_CMPX_LE_F32", []>;
defm V_CMPX_GT_F32 : VOPC_32 <0x00000014, "V_CMPX_GT_F32", []>;
defm V_CMPX_LG_F32 : VOPC_32 <0x00000015, "V_CMPX_LG_F32", []>;
defm V_CMPX_GE_F32 : VOPC_32 <0x00000016, "V_CMPX_GE_F32", []>;
defm V_CMPX_O_F32 : VOPC_32 <0x00000017, "V_CMPX_O_F32", []>;
defm V_CMPX_U_F32 : VOPC_32 <0x00000018, "V_CMPX_U_F32", []>;
defm V_CMPX_NGE_F32 : VOPC_32 <0x00000019, "V_CMPX_NGE_F32", []>;
defm V_CMPX_NLG_F32 : VOPC_32 <0x0000001a, "V_CMPX_NLG_F32", []>;
defm V_CMPX_NGT_F32 : VOPC_32 <0x0000001b, "V_CMPX_NGT_F32", []>;
defm V_CMPX_NLE_F32 : VOPC_32 <0x0000001c, "V_CMPX_NLE_F32", []>;
defm V_CMPX_NEQ_F32 : VOPC_32 <0x0000001d, "V_CMPX_NEQ_F32", []>;
defm V_CMPX_NLT_F32 : VOPC_32 <0x0000001e, "V_CMPX_NLT_F32", []>;
defm V_CMPX_TRU_F32 : VOPC_32 <0x0000001f, "V_CMPX_TRU_F32", []>;

} // End hasSideEffects = 1

defm V_CMP_F_F64 : VOPC_64 <0x00000020, "V_CMP_F_F64", []>;
defm V_CMP_LT_F64 : VOPC_64 <0x00000021, "V_CMP_LT_F64", []>;
defm V_CMP_EQ_F64 : VOPC_64 <0x00000022, "V_CMP_EQ_F64", []>;
defm V_CMP_LE_F64 : VOPC_64 <0x00000023, "V_CMP_LE_F64", []>;
defm V_CMP_GT_F64 : VOPC_64 <0x00000024, "V_CMP_GT_F64", []>;
defm V_CMP_LG_F64 : VOPC_64 <0x00000025, "V_CMP_LG_F64", []>;
defm V_CMP_GE_F64 : VOPC_64 <0x00000026, "V_CMP_GE_F64", []>;
defm V_CMP_O_F64 : VOPC_64 <0x00000027, "V_CMP_O_F64", []>;
defm V_CMP_U_F64 : VOPC_64 <0x00000028, "V_CMP_U_F64", []>;
defm V_CMP_NGE_F64 : VOPC_64 <0x00000029, "V_CMP_NGE_F64", []>;
defm V_CMP_NLG_F64 : VOPC_64 <0x0000002a, "V_CMP_NLG_F64", []>;
defm V_CMP_NGT_F64 : VOPC_64 <0x0000002b, "V_CMP_NGT_F64", []>;
defm V_CMP_NLE_F64 : VOPC_64 <0x0000002c, "V_CMP_NLE_F64", []>;
defm V_CMP_NEQ_F64 : VOPC_64 <0x0000002d, "V_CMP_NEQ_F64", []>;
defm V_CMP_NLT_F64 : VOPC_64 <0x0000002e, "V_CMP_NLT_F64", []>;
defm V_CMP_TRU_F64 : VOPC_64 <0x0000002f, "V_CMP_TRU_F64", []>;

// Side effect is writing to EXEC
let hasSideEffects = 1 in {

defm V_CMPX_F_F64 : VOPC_64 <0x00000030, "V_CMPX_F_F64", []>;
defm V_CMPX_LT_F64 : VOPC_64 <0x00000031, "V_CMPX_LT_F64", []>;
defm V_CMPX_EQ_F64 : VOPC_64 <0x00000032, "V_CMPX_EQ_F64", []>;
defm V_CMPX_LE_F64 : VOPC_64 <0x00000033, "V_CMPX_LE_F64", []>;
defm V_CMPX_GT_F64 : VOPC_64 <0x00000034, "V_CMPX_GT_F64", []>;
defm V_CMPX_LG_F64 : VOPC_64 <0x00000035, "V_CMPX_LG_F64", []>;
defm V_CMPX_GE_F64 : VOPC_64 <0x00000036, "V_CMPX_GE_F64", []>;
defm V_CMPX_O_F64 : VOPC_64 <0x00000037, "V_CMPX_O_F64", []>;
defm V_CMPX_U_F64 : VOPC_64 <0x00000038, "V_CMPX_U_F64", []>;
defm V_CMPX_NGE_F64 : VOPC_64 <0x00000039, "V_CMPX_NGE_F64", []>;
defm V_CMPX_NLG_F64 : VOPC_64 <0x0000003a, "V_CMPX_NLG_F64", []>;
defm V_CMPX_NGT_F64 : VOPC_64 <0x0000003b, "V_CMPX_NGT_F64", []>;
defm V_CMPX_NLE_F64 : VOPC_64 <0x0000003c, "V_CMPX_NLE_F64", []>;
defm V_CMPX_NEQ_F64 : VOPC_64 <0x0000003d, "V_CMPX_NEQ_F64", []>;
defm V_CMPX_NLT_F64 : VOPC_64 <0x0000003e, "V_CMPX_NLT_F64", []>;
defm V_CMPX_TRU_F64 : VOPC_64 <0x0000003f, "V_CMPX_TRU_F64", []>;

} // End hasSideEffects = 1

defm V_CMPS_F_F32 : VOPC_32 <0x00000040, "V_CMPS_F_F32", []>;
defm V_CMPS_LT_F32 : VOPC_32 <0x00000041, "V_CMPS_LT_F32", []>;
defm V_CMPS_EQ_F32 : VOPC_32 <0x00000042, "V_CMPS_EQ_F32", []>;
defm V_CMPS_LE_F32 : VOPC_32 <0x00000043, "V_CMPS_LE_F32", []>;
defm V_CMPS_GT_F32 : VOPC_32 <0x00000044, "V_CMPS_GT_F32", []>;
defm V_CMPS_LG_F32 : VOPC_32 <0x00000045, "V_CMPS_LG_F32", []>;
defm V_CMPS_GE_F32 : VOPC_32 <0x00000046, "V_CMPS_GE_F32", []>;
defm V_CMPS_O_F32 : VOPC_32 <0x00000047, "V_CMPS_O_F32", []>;
defm V_CMPS_U_F32 : VOPC_32 <0x00000048, "V_CMPS_U_F32", []>;
defm V_CMPS_NGE_F32 : VOPC_32 <0x00000049, "V_CMPS_NGE_F32", []>;
defm V_CMPS_NLG_F32 : VOPC_32 <0x0000004a, "V_CMPS_NLG_F32", []>;
defm V_CMPS_NGT_F32 : VOPC_32 <0x0000004b, "V_CMPS_NGT_F32", []>;
defm V_CMPS_NLE_F32 : VOPC_32 <0x0000004c, "V_CMPS_NLE_F32", []>;
defm V_CMPS_NEQ_F32 : VOPC_32 <0x0000004d, "V_CMPS_NEQ_F32", []>;
defm V_CMPS_NLT_F32 : VOPC_32 <0x0000004e, "V_CMPS_NLT_F32", []>;
defm V_CMPS_TRU_F32 : VOPC_32 <0x0000004f, "V_CMPS_TRU_F32", []>;
defm V_CMPSX_F_F32 : VOPC_32 <0x00000050, "V_CMPSX_F_F32", []>;
defm V_CMPSX_LT_F32 : VOPC_32 <0x00000051, "V_CMPSX_LT_F32", []>;
defm V_CMPSX_EQ_F32 : VOPC_32 <0x00000052, "V_CMPSX_EQ_F32", []>;
defm V_CMPSX_LE_F32 : VOPC_32 <0x00000053, "V_CMPSX_LE_F32", []>;
defm V_CMPSX_GT_F32 : VOPC_32 <0x00000054, "V_CMPSX_GT_F32", []>;
defm V_CMPSX_LG_F32 : VOPC_32 <0x00000055, "V_CMPSX_LG_F32", []>;
defm V_CMPSX_GE_F32 : VOPC_32 <0x00000056, "V_CMPSX_GE_F32", []>;
defm V_CMPSX_O_F32 : VOPC_32 <0x00000057, "V_CMPSX_O_F32", []>;
defm V_CMPSX_U_F32 : VOPC_32 <0x00000058, "V_CMPSX_U_F32", []>;
defm V_CMPSX_NGE_F32 : VOPC_32 <0x00000059, "V_CMPSX_NGE_F32", []>;
defm V_CMPSX_NLG_F32 : VOPC_32 <0x0000005a, "V_CMPSX_NLG_F32", []>;
defm V_CMPSX_NGT_F32 : VOPC_32 <0x0000005b, "V_CMPSX_NGT_F32", []>;
defm V_CMPSX_NLE_F32 : VOPC_32 <0x0000005c, "V_CMPSX_NLE_F32", []>;
defm V_CMPSX_NEQ_F32 : VOPC_32 <0x0000005d, "V_CMPSX_NEQ_F32", []>;
defm V_CMPSX_NLT_F32 : VOPC_32 <0x0000005e, "V_CMPSX_NLT_F32", []>;
defm V_CMPSX_TRU_F32 : VOPC_32 <0x0000005f, "V_CMPSX_TRU_F32", []>;
defm V_CMPS_F_F64 : VOPC_64 <0x00000060, "V_CMPS_F_F64", []>;
defm V_CMPS_LT_F64 : VOPC_64 <0x00000061, "V_CMPS_LT_F64", []>;
defm V_CMPS_EQ_F64 : VOPC_64 <0x00000062, "V_CMPS_EQ_F64", []>;
defm V_CMPS_LE_F64 : VOPC_64 <0x00000063, "V_CMPS_LE_F64", []>;
defm V_CMPS_GT_F64 : VOPC_64 <0x00000064, "V_CMPS_GT_F64", []>;
defm V_CMPS_LG_F64 : VOPC_64 <0x00000065, "V_CMPS_LG_F64", []>;
defm V_CMPS_GE_F64 : VOPC_64 <0x00000066, "V_CMPS_GE_F64", []>;
defm V_CMPS_O_F64 : VOPC_64 <0x00000067, "V_CMPS_O_F64", []>;
defm V_CMPS_U_F64 : VOPC_64 <0x00000068, "V_CMPS_U_F64", []>;
defm V_CMPS_NGE_F64 : VOPC_64 <0x00000069, "V_CMPS_NGE_F64", []>;
defm V_CMPS_NLG_F64 : VOPC_64 <0x0000006a, "V_CMPS_NLG_F64", []>;
defm V_CMPS_NGT_F64 : VOPC_64 <0x0000006b, "V_CMPS_NGT_F64", []>;
defm V_CMPS_NLE_F64 : VOPC_64 <0x0000006c, "V_CMPS_NLE_F64", []>;
defm V_CMPS_NEQ_F64 : VOPC_64 <0x0000006d, "V_CMPS_NEQ_F64", []>;
defm V_CMPS_NLT_F64 : VOPC_64 <0x0000006e, "V_CMPS_NLT_F64", []>;
defm V_CMPS_TRU_F64 : VOPC_64 <0x0000006f, "V_CMPS_TRU_F64", []>;
defm V_CMPSX_F_F64 : VOPC_64 <0x00000070, "V_CMPSX_F_F64", []>;
defm V_CMPSX_LT_F64 : VOPC_64 <0x00000071, "V_CMPSX_LT_F64", []>;
defm V_CMPSX_EQ_F64 : VOPC_64 <0x00000072, "V_CMPSX_EQ_F64", []>;
defm V_CMPSX_LE_F64 : VOPC_64 <0x00000073, "V_CMPSX_LE_F64", []>;
defm V_CMPSX_GT_F64 : VOPC_64 <0x00000074, "V_CMPSX_GT_F64", []>;
defm V_CMPSX_LG_F64 : VOPC_64 <0x00000075, "V_CMPSX_LG_F64", []>;
defm V_CMPSX_GE_F64 : VOPC_64 <0x00000076, "V_CMPSX_GE_F64", []>;
defm V_CMPSX_O_F64 : VOPC_64 <0x00000077, "V_CMPSX_O_F64", []>;
defm V_CMPSX_U_F64 : VOPC_64 <0x00000078, "V_CMPSX_U_F64", []>;
defm V_CMPSX_NGE_F64 : VOPC_64 <0x00000079, "V_CMPSX_NGE_F64", []>;
defm V_CMPSX_NLG_F64 : VOPC_64 <0x0000007a, "V_CMPSX_NLG_F64", []>;
defm V_CMPSX_NGT_F64 : VOPC_64 <0x0000007b, "V_CMPSX_NGT_F64", []>;
defm V_CMPSX_NLE_F64 : VOPC_64 <0x0000007c, "V_CMPSX_NLE_F64", []>;
defm V_CMPSX_NEQ_F64 : VOPC_64 <0x0000007d, "V_CMPSX_NEQ_F64", []>;
defm V_CMPSX_NLT_F64 : VOPC_64 <0x0000007e, "V_CMPSX_NLT_F64", []>;
defm V_CMPSX_TRU_F64 : VOPC_64 <0x0000007f, "V_CMPSX_TRU_F64", []>;
defm V_CMP_F_I32 : VOPC_32 <0x00000080, "V_CMP_F_I32", []>;
defm V_CMP_LT_I32 : VOPC_32 <0x00000081, "V_CMP_LT_I32", []>;
def : Pat <
  (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_LT)),
  (V_CMP_LT_I32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_EQ_I32 : VOPC_32 <0x00000082, "V_CMP_EQ_I32", []>;
def : Pat <
  (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_EQ)),
  (V_CMP_EQ_I32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_LE_I32 : VOPC_32 <0x00000083, "V_CMP_LE_I32", []>;
def : Pat <
  (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_LE)),
  (V_CMP_LE_I32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_GT_I32 : VOPC_32 <0x00000084, "V_CMP_GT_I32", []>;
def : Pat <
  (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_GT)),
  (V_CMP_GT_I32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_NE_I32 : VOPC_32 <0x00000085, "V_CMP_NE_I32", []>;
def : Pat <
  (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_NE)),
  (V_CMP_NE_I32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_GE_I32 : VOPC_32 <0x00000086, "V_CMP_GE_I32", []>;
def : Pat <
  (i1 (setcc (i32 AllReg_32:$src0), VReg_32:$src1, COND_GE)),
  (V_CMP_GE_I32_e64 AllReg_32:$src0, VReg_32:$src1)
>;
defm V_CMP_T_I32 : VOPC_32 <0x00000087, "V_CMP_T_I32", []>;

let hasSideEffects = 1 in {

defm V_CMPX_F_I32 : VOPC_32 <0x00000090, "V_CMPX_F_I32", []>;
defm V_CMPX_LT_I32 : VOPC_32 <0x00000091, "V_CMPX_LT_I32", []>;
defm V_CMPX_EQ_I32 : VOPC_32 <0x00000092, "V_CMPX_EQ_I32", []>;
defm V_CMPX_LE_I32 : VOPC_32 <0x00000093, "V_CMPX_LE_I32", []>;
defm V_CMPX_GT_I32 : VOPC_32 <0x00000094, "V_CMPX_GT_I32", []>;
defm V_CMPX_NE_I32 : VOPC_32 <0x00000095, "V_CMPX_NE_I32", []>;
defm V_CMPX_GE_I32 : VOPC_32 <0x00000096, "V_CMPX_GE_I32", []>;
defm V_CMPX_T_I32 : VOPC_32 <0x00000097, "V_CMPX_T_I32", []>;

} // End hasSideEffects

defm V_CMP_F_I64 : VOPC_64 <0x000000a0, "V_CMP_F_I64", []>;
defm V_CMP_LT_I64 : VOPC_64 <0x000000a1, "V_CMP_LT_I64", []>;
defm V_CMP_EQ_I64 : VOPC_64 <0x000000a2, "V_CMP_EQ_I64", []>;
defm V_CMP_LE_I64 : VOPC_64 <0x000000a3, "V_CMP_LE_I64", []>;
defm V_CMP_GT_I64 : VOPC_64 <0x000000a4, "V_CMP_GT_I64", []>;
defm V_CMP_NE_I64 : VOPC_64 <0x000000a5, "V_CMP_NE_I64", []>;
defm V_CMP_GE_I64 : VOPC_64 <0x000000a6, "V_CMP_GE_I64", []>;
defm V_CMP_T_I64 : VOPC_64 <0x000000a7, "V_CMP_T_I64", []>;

let hasSideEffects = 1 in {

defm V_CMPX_F_I64 : VOPC_64 <0x000000b0, "V_CMPX_F_I64", []>;
defm V_CMPX_LT_I64 : VOPC_64 <0x000000b1, "V_CMPX_LT_I64", []>;
defm V_CMPX_EQ_I64 : VOPC_64 <0x000000b2, "V_CMPX_EQ_I64", []>;
defm V_CMPX_LE_I64 : VOPC_64 <0x000000b3, "V_CMPX_LE_I64", []>;
defm V_CMPX_GT_I64 : VOPC_64 <0x000000b4, "V_CMPX_GT_I64", []>;
defm V_CMPX_NE_I64 : VOPC_64 <0x000000b5, "V_CMPX_NE_I64", []>;
defm V_CMPX_GE_I64 : VOPC_64 <0x000000b6, "V_CMPX_GE_I64", []>;
defm V_CMPX_T_I64 : VOPC_64 <0x000000b7, "V_CMPX_T_I64", []>;

} // End hasSideEffects

defm V_CMP_F_U32 : VOPC_32 <0x000000c0, "V_CMP_F_U32", []>;
defm V_CMP_LT_U32 : VOPC_32 <0x000000c1, "V_CMP_LT_U32", []>;
defm V_CMP_EQ_U32 : VOPC_32 <0x000000c2, "V_CMP_EQ_U32", []>;
defm V_CMP_LE_U32 : VOPC_32 <0x000000c3, "V_CMP_LE_U32", []>;
defm V_CMP_GT_U32 : VOPC_32 <0x000000c4, "V_CMP_GT_U32", []>;
defm V_CMP_NE_U32 : VOPC_32 <0x000000c5, "V_CMP_NE_U32", []>;
defm V_CMP_GE_U32 : VOPC_32 <0x000000c6, "V_CMP_GE_U32", []>;
defm V_CMP_T_U32 : VOPC_32 <0x000000c7, "V_CMP_T_U32", []>;

let hasSideEffects = 1 in {

defm V_CMPX_F_U32 : VOPC_32 <0x000000d0, "V_CMPX_F_U32", []>;
defm V_CMPX_LT_U32 : VOPC_32 <0x000000d1, "V_CMPX_LT_U32", []>;
defm V_CMPX_EQ_U32 : VOPC_32 <0x000000d2, "V_CMPX_EQ_U32", []>;
defm V_CMPX_LE_U32 : VOPC_32 <0x000000d3, "V_CMPX_LE_U32", []>;
defm V_CMPX_GT_U32 : VOPC_32 <0x000000d4, "V_CMPX_GT_U32", []>;
defm V_CMPX_NE_U32 : VOPC_32 <0x000000d5, "V_CMPX_NE_U32", []>;
defm V_CMPX_GE_U32 : VOPC_32 <0x000000d6, "V_CMPX_GE_U32", []>;
defm V_CMPX_T_U32 : VOPC_32 <0x000000d7, "V_CMPX_T_U32", []>;

} // End hasSideEffects

defm V_CMP_F_U64 : VOPC_64 <0x000000e0, "V_CMP_F_U64", []>;
defm V_CMP_LT_U64 : VOPC_64 <0x000000e1, "V_CMP_LT_U64", []>;
defm V_CMP_EQ_U64 : VOPC_64 <0x000000e2, "V_CMP_EQ_U64", []>;
defm V_CMP_LE_U64 : VOPC_64 <0x000000e3, "V_CMP_LE_U64", []>;
defm V_CMP_GT_U64 : VOPC_64 <0x000000e4, "V_CMP_GT_U64", []>;
defm V_CMP_NE_U64 : VOPC_64 <0x000000e5, "V_CMP_NE_U64", []>;
defm V_CMP_GE_U64 : VOPC_64 <0x000000e6, "V_CMP_GE_U64", []>;
defm V_CMP_T_U64 : VOPC_64 <0x000000e7, "V_CMP_T_U64", []>;
defm V_CMPX_F_U64 : VOPC_64 <0x000000f0, "V_CMPX_F_U64", []>;
defm V_CMPX_LT_U64 : VOPC_64 <0x000000f1, "V_CMPX_LT_U64", []>;
defm V_CMPX_EQ_U64 : VOPC_64 <0x000000f2, "V_CMPX_EQ_U64", []>;
defm V_CMPX_LE_U64 : VOPC_64 <0x000000f3, "V_CMPX_LE_U64", []>;
defm V_CMPX_GT_U64 : VOPC_64 <0x000000f4, "V_CMPX_GT_U64", []>;
defm V_CMPX_NE_U64 : VOPC_64 <0x000000f5, "V_CMPX_NE_U64", []>;
defm V_CMPX_GE_U64 : VOPC_64 <0x000000f6, "V_CMPX_GE_U64", []>;
defm V_CMPX_T_U64 : VOPC_64 <0x000000f7, "V_CMPX_T_U64", []>;
defm V_CMP_CLASS_F32 : VOPC_32 <0x00000088, "V_CMP_CLASS_F32", []>;
defm V_CMPX_CLASS_F32 : VOPC_32 <0x00000098, "V_CMPX_CLASS_F32", []>;
defm V_CMP_CLASS_F64 : VOPC_64 <0x000000a8, "V_CMP_CLASS_F64", []>;
defm V_CMPX_CLASS_F64 : VOPC_64 <0x000000b8, "V_CMPX_CLASS_F64", []>;
//def BUFFER_LOAD_FORMAT_X : MUBUF_ <0x00000000, "BUFFER_LOAD_FORMAT_X", []>;
//def BUFFER_LOAD_FORMAT_XY : MUBUF_ <0x00000001, "BUFFER_LOAD_FORMAT_XY", []>;
//def BUFFER_LOAD_FORMAT_XYZ : MUBUF_ <0x00000002, "BUFFER_LOAD_FORMAT_XYZ", []>;
def BUFFER_LOAD_FORMAT_XYZW : MUBUF_Load_Helper <0x00000003, "BUFFER_LOAD_FORMAT_XYZW", VReg_128>;
//def BUFFER_STORE_FORMAT_X : MUBUF_ <0x00000004, "BUFFER_STORE_FORMAT_X", []>;
//def BUFFER_STORE_FORMAT_XY : MUBUF_ <0x00000005, "BUFFER_STORE_FORMAT_XY", []>;
//def BUFFER_STORE_FORMAT_XYZ : MUBUF_ <0x00000006, "BUFFER_STORE_FORMAT_XYZ", []>;
//def BUFFER_STORE_FORMAT_XYZW : MUBUF_ <0x00000007, "BUFFER_STORE_FORMAT_XYZW", []>;
//def BUFFER_LOAD_UBYTE : MUBUF_ <0x00000008, "BUFFER_LOAD_UBYTE", []>;
//def BUFFER_LOAD_SBYTE : MUBUF_ <0x00000009, "BUFFER_LOAD_SBYTE", []>;
//def BUFFER_LOAD_USHORT : MUBUF_ <0x0000000a, "BUFFER_LOAD_USHORT", []>;
//def BUFFER_LOAD_SSHORT : MUBUF_ <0x0000000b, "BUFFER_LOAD_SSHORT", []>;
//def BUFFER_LOAD_DWORD : MUBUF_ <0x0000000c, "BUFFER_LOAD_DWORD", []>;
//def BUFFER_LOAD_DWORDX2 : MUBUF_DWORDX2 <0x0000000d, "BUFFER_LOAD_DWORDX2", []>;
//def BUFFER_LOAD_DWORDX4 : MUBUF_DWORDX4 <0x0000000e, "BUFFER_LOAD_DWORDX4", []>;
//def BUFFER_STORE_BYTE : MUBUF_ <0x00000018, "BUFFER_STORE_BYTE", []>;
//def BUFFER_STORE_SHORT : MUBUF_ <0x0000001a, "BUFFER_STORE_SHORT", []>;
//def BUFFER_STORE_DWORD : MUBUF_ <0x0000001c, "BUFFER_STORE_DWORD", []>;
//def BUFFER_STORE_DWORDX2 : MUBUF_DWORDX2 <0x0000001d, "BUFFER_STORE_DWORDX2", []>;
//def BUFFER_STORE_DWORDX4 : MUBUF_DWORDX4 <0x0000001e, "BUFFER_STORE_DWORDX4", []>;
//def BUFFER_ATOMIC_SWAP : MUBUF_ <0x00000030, "BUFFER_ATOMIC_SWAP", []>;
//def BUFFER_ATOMIC_CMPSWAP : MUBUF_ <0x00000031, "BUFFER_ATOMIC_CMPSWAP", []>;
//def BUFFER_ATOMIC_ADD : MUBUF_ <0x00000032, "BUFFER_ATOMIC_ADD", []>;
//def BUFFER_ATOMIC_SUB : MUBUF_ <0x00000033, "BUFFER_ATOMIC_SUB", []>;
//def BUFFER_ATOMIC_RSUB : MUBUF_ <0x00000034, "BUFFER_ATOMIC_RSUB", []>;
//def BUFFER_ATOMIC_SMIN : MUBUF_ <0x00000035, "BUFFER_ATOMIC_SMIN", []>;
//def BUFFER_ATOMIC_UMIN : MUBUF_ <0x00000036, "BUFFER_ATOMIC_UMIN", []>;
//def BUFFER_ATOMIC_SMAX : MUBUF_ <0x00000037, "BUFFER_ATOMIC_SMAX", []>;
//def BUFFER_ATOMIC_UMAX : MUBUF_ <0x00000038, "BUFFER_ATOMIC_UMAX", []>;
//def BUFFER_ATOMIC_AND : MUBUF_ <0x00000039, "BUFFER_ATOMIC_AND", []>;
//def BUFFER_ATOMIC_OR : MUBUF_ <0x0000003a, "BUFFER_ATOMIC_OR", []>;
//def BUFFER_ATOMIC_XOR : MUBUF_ <0x0000003b, "BUFFER_ATOMIC_XOR", []>;
//def BUFFER_ATOMIC_INC : MUBUF_ <0x0000003c, "BUFFER_ATOMIC_INC", []>;
//def BUFFER_ATOMIC_DEC : MUBUF_ <0x0000003d, "BUFFER_ATOMIC_DEC", []>;
//def BUFFER_ATOMIC_FCMPSWAP : MUBUF_ <0x0000003e, "BUFFER_ATOMIC_FCMPSWAP", []>;
//def BUFFER_ATOMIC_FMIN : MUBUF_ <0x0000003f, "BUFFER_ATOMIC_FMIN", []>;
//def BUFFER_ATOMIC_FMAX : MUBUF_ <0x00000040, "BUFFER_ATOMIC_FMAX", []>;
//def BUFFER_ATOMIC_SWAP_X2 : MUBUF_X2 <0x00000050, "BUFFER_ATOMIC_SWAP_X2", []>;
//def BUFFER_ATOMIC_CMPSWAP_X2 : MUBUF_X2 <0x00000051, "BUFFER_ATOMIC_CMPSWAP_X2", []>;
//def BUFFER_ATOMIC_ADD_X2 : MUBUF_X2 <0x00000052, "BUFFER_ATOMIC_ADD_X2", []>;
//def BUFFER_ATOMIC_SUB_X2 : MUBUF_X2 <0x00000053, "BUFFER_ATOMIC_SUB_X2", []>;
//def BUFFER_ATOMIC_RSUB_X2 : MUBUF_X2 <0x00000054, "BUFFER_ATOMIC_RSUB_X2", []>;
//def BUFFER_ATOMIC_SMIN_X2 : MUBUF_X2 <0x00000055, "BUFFER_ATOMIC_SMIN_X2", []>;
//def BUFFER_ATOMIC_UMIN_X2 : MUBUF_X2 <0x00000056, "BUFFER_ATOMIC_UMIN_X2", []>;
//def BUFFER_ATOMIC_SMAX_X2 : MUBUF_X2 <0x00000057, "BUFFER_ATOMIC_SMAX_X2", []>;
//def BUFFER_ATOMIC_UMAX_X2 : MUBUF_X2 <0x00000058, "BUFFER_ATOMIC_UMAX_X2", []>;
//def BUFFER_ATOMIC_AND_X2 : MUBUF_X2 <0x00000059, "BUFFER_ATOMIC_AND_X2", []>;
//def BUFFER_ATOMIC_OR_X2 : MUBUF_X2 <0x0000005a, "BUFFER_ATOMIC_OR_X2", []>;
//def BUFFER_ATOMIC_XOR_X2 : MUBUF_X2 <0x0000005b, "BUFFER_ATOMIC_XOR_X2", []>;
//def BUFFER_ATOMIC_INC_X2 : MUBUF_X2 <0x0000005c, "BUFFER_ATOMIC_INC_X2", []>;
//def BUFFER_ATOMIC_DEC_X2 : MUBUF_X2 <0x0000005d, "BUFFER_ATOMIC_DEC_X2", []>;
//def BUFFER_ATOMIC_FCMPSWAP_X2 : MUBUF_X2 <0x0000005e, "BUFFER_ATOMIC_FCMPSWAP_X2", []>;
//def BUFFER_ATOMIC_FMIN_X2 : MUBUF_X2 <0x0000005f, "BUFFER_ATOMIC_FMIN_X2", []>;
//def BUFFER_ATOMIC_FMAX_X2 : MUBUF_X2 <0x00000060, "BUFFER_ATOMIC_FMAX_X2", []>;
//def BUFFER_WBINVL1_SC : MUBUF_WBINVL1 <0x00000070, "BUFFER_WBINVL1_SC", []>;
//def BUFFER_WBINVL1 : MUBUF_WBINVL1 <0x00000071, "BUFFER_WBINVL1", []>;
//def TBUFFER_LOAD_FORMAT_X : MTBUF_ <0x00000000, "TBUFFER_LOAD_FORMAT_X", []>;
//def TBUFFER_LOAD_FORMAT_XY : MTBUF_ <0x00000001, "TBUFFER_LOAD_FORMAT_XY", []>;
//def TBUFFER_LOAD_FORMAT_XYZ : MTBUF_ <0x00000002, "TBUFFER_LOAD_FORMAT_XYZ", []>;
def TBUFFER_LOAD_FORMAT_XYZW : MTBUF_Load_Helper <0x00000003, "TBUFFER_LOAD_FORMAT_XYZW", VReg_128>;
//def TBUFFER_STORE_FORMAT_X : MTBUF_ <0x00000004, "TBUFFER_STORE_FORMAT_X", []>;
//def TBUFFER_STORE_FORMAT_XY : MTBUF_ <0x00000005, "TBUFFER_STORE_FORMAT_XY", []>;
//def TBUFFER_STORE_FORMAT_XYZ : MTBUF_ <0x00000006, "TBUFFER_STORE_FORMAT_XYZ", []>;
//def TBUFFER_STORE_FORMAT_XYZW : MTBUF_ <0x00000007, "TBUFFER_STORE_FORMAT_XYZW", []>;

defm S_LOAD_DWORD : SMRD_32 <0x00000000, "S_LOAD_DWORD", SReg_32>;

//def S_LOAD_DWORDX2 : SMRD_DWORDX2 <0x00000001, "S_LOAD_DWORDX2", []>;
defm S_LOAD_DWORDX4 : SMRD_Helper <0x00000002, "S_LOAD_DWORDX4", SReg_128, v4i32>;
defm S_LOAD_DWORDX8 : SMRD_Helper <0x00000003, "S_LOAD_DWORDX8", SReg_256, v8i32>;
//def S_LOAD_DWORDX16 : SMRD_DWORDX16 <0x00000004, "S_LOAD_DWORDX16", []>;
//def S_BUFFER_LOAD_DWORD : SMRD_ <0x00000008, "S_BUFFER_LOAD_DWORD", []>;
//def S_BUFFER_LOAD_DWORDX2 : SMRD_DWORDX2 <0x00000009, "S_BUFFER_LOAD_DWORDX2", []>;
//def S_BUFFER_LOAD_DWORDX4 : SMRD_DWORDX4 <0x0000000a, "S_BUFFER_LOAD_DWORDX4", []>;
//def S_BUFFER_LOAD_DWORDX8 : SMRD_DWORDX8 <0x0000000b, "S_BUFFER_LOAD_DWORDX8", []>;
//def S_BUFFER_LOAD_DWORDX16 : SMRD_DWORDX16 <0x0000000c, "S_BUFFER_LOAD_DWORDX16", []>;

//def S_MEMTIME : SMRD_ <0x0000001e, "S_MEMTIME", []>;
//def S_DCACHE_INV : SMRD_ <0x0000001f, "S_DCACHE_INV", []>;
//def IMAGE_LOAD : MIMG_NoPattern_ <"IMAGE_LOAD", 0x00000000>;
//def IMAGE_LOAD_MIP : MIMG_NoPattern_ <"IMAGE_LOAD_MIP", 0x00000001>;
//def IMAGE_LOAD_PCK : MIMG_NoPattern_ <"IMAGE_LOAD_PCK", 0x00000002>;
//def IMAGE_LOAD_PCK_SGN : MIMG_NoPattern_ <"IMAGE_LOAD_PCK_SGN", 0x00000003>;
//def IMAGE_LOAD_MIP_PCK : MIMG_NoPattern_ <"IMAGE_LOAD_MIP_PCK", 0x00000004>;
//def IMAGE_LOAD_MIP_PCK_SGN : MIMG_NoPattern_ <"IMAGE_LOAD_MIP_PCK_SGN", 0x00000005>;
//def IMAGE_STORE : MIMG_NoPattern_ <"IMAGE_STORE", 0x00000008>;
//def IMAGE_STORE_MIP : MIMG_NoPattern_ <"IMAGE_STORE_MIP", 0x00000009>;
//def IMAGE_STORE_PCK : MIMG_NoPattern_ <"IMAGE_STORE_PCK", 0x0000000a>;
//def IMAGE_STORE_MIP_PCK : MIMG_NoPattern_ <"IMAGE_STORE_MIP_PCK", 0x0000000b>;
//def IMAGE_GET_RESINFO : MIMG_NoPattern_ <"IMAGE_GET_RESINFO", 0x0000000e>;
//def IMAGE_ATOMIC_SWAP : MIMG_NoPattern_ <"IMAGE_ATOMIC_SWAP", 0x0000000f>;
//def IMAGE_ATOMIC_CMPSWAP : MIMG_NoPattern_ <"IMAGE_ATOMIC_CMPSWAP", 0x00000010>;
//def IMAGE_ATOMIC_ADD : MIMG_NoPattern_ <"IMAGE_ATOMIC_ADD", 0x00000011>;
//def IMAGE_ATOMIC_SUB : MIMG_NoPattern_ <"IMAGE_ATOMIC_SUB", 0x00000012>;
//def IMAGE_ATOMIC_RSUB : MIMG_NoPattern_ <"IMAGE_ATOMIC_RSUB", 0x00000013>;
//def IMAGE_ATOMIC_SMIN : MIMG_NoPattern_ <"IMAGE_ATOMIC_SMIN", 0x00000014>;
//def IMAGE_ATOMIC_UMIN : MIMG_NoPattern_ <"IMAGE_ATOMIC_UMIN", 0x00000015>;
//def IMAGE_ATOMIC_SMAX : MIMG_NoPattern_ <"IMAGE_ATOMIC_SMAX", 0x00000016>;
//def IMAGE_ATOMIC_UMAX : MIMG_NoPattern_ <"IMAGE_ATOMIC_UMAX", 0x00000017>;
//def IMAGE_ATOMIC_AND : MIMG_NoPattern_ <"IMAGE_ATOMIC_AND", 0x00000018>;
//def IMAGE_ATOMIC_OR : MIMG_NoPattern_ <"IMAGE_ATOMIC_OR", 0x00000019>;
//def IMAGE_ATOMIC_XOR : MIMG_NoPattern_ <"IMAGE_ATOMIC_XOR", 0x0000001a>;
//def IMAGE_ATOMIC_INC : MIMG_NoPattern_ <"IMAGE_ATOMIC_INC", 0x0000001b>;
//def IMAGE_ATOMIC_DEC : MIMG_NoPattern_ <"IMAGE_ATOMIC_DEC", 0x0000001c>;
//def IMAGE_ATOMIC_FCMPSWAP : MIMG_NoPattern_ <"IMAGE_ATOMIC_FCMPSWAP", 0x0000001d>;
//def IMAGE_ATOMIC_FMIN : MIMG_NoPattern_ <"IMAGE_ATOMIC_FMIN", 0x0000001e>;
//def IMAGE_ATOMIC_FMAX : MIMG_NoPattern_ <"IMAGE_ATOMIC_FMAX", 0x0000001f>;
def IMAGE_SAMPLE : MIMG_Load_Helper <0x00000020, "IMAGE_SAMPLE">;
//def IMAGE_SAMPLE_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_CL", 0x00000021>;
def IMAGE_SAMPLE_D : MIMG_Load_Helper <0x00000022, "IMAGE_SAMPLE_D">;
//def IMAGE_SAMPLE_D_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_D_CL", 0x00000023>;
def IMAGE_SAMPLE_L : MIMG_Load_Helper <0x00000024, "IMAGE_SAMPLE_L">;
def IMAGE_SAMPLE_B : MIMG_Load_Helper <0x00000025, "IMAGE_SAMPLE_B">;
//def IMAGE_SAMPLE_B_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_B_CL", 0x00000026>;
//def IMAGE_SAMPLE_LZ : MIMG_NoPattern_ <"IMAGE_SAMPLE_LZ", 0x00000027>;
//def IMAGE_SAMPLE_C : MIMG_NoPattern_ <"IMAGE_SAMPLE_C", 0x00000028>;
//def IMAGE_SAMPLE_C_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_CL", 0x00000029>;
//def IMAGE_SAMPLE_C_D : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_D", 0x0000002a>;
//def IMAGE_SAMPLE_C_D_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_D_CL", 0x0000002b>;
//def IMAGE_SAMPLE_C_L : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_L", 0x0000002c>;
//def IMAGE_SAMPLE_C_B : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_B", 0x0000002d>;
//def IMAGE_SAMPLE_C_B_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_B_CL", 0x0000002e>;
//def IMAGE_SAMPLE_C_LZ : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_LZ", 0x0000002f>;
//def IMAGE_SAMPLE_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_O", 0x00000030>;
//def IMAGE_SAMPLE_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_CL_O", 0x00000031>;
//def IMAGE_SAMPLE_D_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_D_O", 0x00000032>;
//def IMAGE_SAMPLE_D_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_D_CL_O", 0x00000033>;
//def IMAGE_SAMPLE_L_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_L_O", 0x00000034>;
//def IMAGE_SAMPLE_B_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_B_O", 0x00000035>;
//def IMAGE_SAMPLE_B_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_B_CL_O", 0x00000036>;
//def IMAGE_SAMPLE_LZ_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_LZ_O", 0x00000037>;
//def IMAGE_SAMPLE_C_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_O", 0x00000038>;
//def IMAGE_SAMPLE_C_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_CL_O", 0x00000039>;
//def IMAGE_SAMPLE_C_D_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_D_O", 0x0000003a>;
//def IMAGE_SAMPLE_C_D_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_D_CL_O", 0x0000003b>;
//def IMAGE_SAMPLE_C_L_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_L_O", 0x0000003c>;
//def IMAGE_SAMPLE_C_B_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_B_O", 0x0000003d>;
//def IMAGE_SAMPLE_C_B_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_B_CL_O", 0x0000003e>;
//def IMAGE_SAMPLE_C_LZ_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_LZ_O", 0x0000003f>;
//def IMAGE_GATHER4 : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4", 0x00000040>;
//def IMAGE_GATHER4_CL : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_CL", 0x00000041>;
//def IMAGE_GATHER4_L : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_L", 0x00000044>;
//def IMAGE_GATHER4_B : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_B", 0x00000045>;
//def IMAGE_GATHER4_B_CL : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_B_CL", 0x00000046>;
//def IMAGE_GATHER4_LZ : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_LZ", 0x00000047>;
//def IMAGE_GATHER4_C : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C", 0x00000048>;
//def IMAGE_GATHER4_C_CL : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_CL", 0x00000049>;
//def IMAGE_GATHER4_C_L : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_L", 0x0000004c>;
//def IMAGE_GATHER4_C_B : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_B", 0x0000004d>;
//def IMAGE_GATHER4_C_B_CL : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_B_CL", 0x0000004e>;
//def IMAGE_GATHER4_C_LZ : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_LZ", 0x0000004f>;
//def IMAGE_GATHER4_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_O", 0x00000050>;
//def IMAGE_GATHER4_CL_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_CL_O", 0x00000051>;
//def IMAGE_GATHER4_L_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_L_O", 0x00000054>;
//def IMAGE_GATHER4_B_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_B_O", 0x00000055>;
//def IMAGE_GATHER4_B_CL_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_B_CL_O", 0x00000056>;
//def IMAGE_GATHER4_LZ_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_LZ_O", 0x00000057>;
//def IMAGE_GATHER4_C_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_O", 0x00000058>;
//def IMAGE_GATHER4_C_CL_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_CL_O", 0x00000059>;
//def IMAGE_GATHER4_C_L_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_L_O", 0x0000005c>;
//def IMAGE_GATHER4_C_B_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_B_O", 0x0000005d>;
//def IMAGE_GATHER4_C_B_CL_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_B_CL_O", 0x0000005e>;
//def IMAGE_GATHER4_C_LZ_O : MIMG_NoPattern_GATHER4 <"IMAGE_GATHER4_C_LZ_O", 0x0000005f>;
//def IMAGE_GET_LOD : MIMG_NoPattern_ <"IMAGE_GET_LOD", 0x00000060>;
//def IMAGE_SAMPLE_CD : MIMG_NoPattern_ <"IMAGE_SAMPLE_CD", 0x00000068>;
//def IMAGE_SAMPLE_CD_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_CD_CL", 0x00000069>;
//def IMAGE_SAMPLE_C_CD : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_CD", 0x0000006a>;
//def IMAGE_SAMPLE_C_CD_CL : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_CD_CL", 0x0000006b>;
//def IMAGE_SAMPLE_CD_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_CD_O", 0x0000006c>;
//def IMAGE_SAMPLE_CD_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_CD_CL_O", 0x0000006d>;
//def IMAGE_SAMPLE_C_CD_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_CD_O", 0x0000006e>;
//def IMAGE_SAMPLE_C_CD_CL_O : MIMG_NoPattern_ <"IMAGE_SAMPLE_C_CD_CL_O", 0x0000006f>;
//def IMAGE_RSRC256 : MIMG_NoPattern_RSRC256 <"IMAGE_RSRC256", 0x0000007e>;
//def IMAGE_SAMPLER : MIMG_NoPattern_ <"IMAGE_SAMPLER", 0x0000007f>;
//def V_NOP : VOP1_ <0x00000000, "V_NOP", []>;

let neverHasSideEffects = 1 in {
defm V_MOV_B32 : VOP1_32 <0x00000001, "V_MOV_B32", []>;
} // End neverHasSideEffects
defm V_READFIRSTLANE_B32 : VOP1_32 <0x00000002, "V_READFIRSTLANE_B32", []>;
//defm V_CVT_I32_F64 : VOP1_32 <0x00000003, "V_CVT_I32_F64", []>;
//defm V_CVT_F64_I32 : VOP1_64 <0x00000004, "V_CVT_F64_I32", []>;
defm V_CVT_F32_I32 : VOP1_32 <0x00000005, "V_CVT_F32_I32",
  [(set VReg_32:$dst, (sint_to_fp AllReg_32:$src0))]
>;
//defm V_CVT_F32_U32 : VOP1_32 <0x00000006, "V_CVT_F32_U32", []>;
//defm V_CVT_U32_F32 : VOP1_32 <0x00000007, "V_CVT_U32_F32", []>;
defm V_CVT_I32_F32 : VOP1_32 <0x00000008, "V_CVT_I32_F32",
  [(set VReg_32:$dst, (fp_to_sint AllReg_32:$src0))]
>;
defm V_MOV_FED_B32 : VOP1_32 <0x00000009, "V_MOV_FED_B32", []>;
////def V_CVT_F16_F32 : VOP1_F16 <0x0000000a, "V_CVT_F16_F32", []>;
//defm V_CVT_F32_F16 : VOP1_32 <0x0000000b, "V_CVT_F32_F16", []>;
//defm V_CVT_RPI_I32_F32 : VOP1_32 <0x0000000c, "V_CVT_RPI_I32_F32", []>;
//defm V_CVT_FLR_I32_F32 : VOP1_32 <0x0000000d, "V_CVT_FLR_I32_F32", []>;
//defm V_CVT_OFF_F32_I4 : VOP1_32 <0x0000000e, "V_CVT_OFF_F32_I4", []>;
//defm V_CVT_F32_F64 : VOP1_32 <0x0000000f, "V_CVT_F32_F64", []>;
//defm V_CVT_F64_F32 : VOP1_64 <0x00000010, "V_CVT_F64_F32", []>;
//defm V_CVT_F32_UBYTE0 : VOP1_32 <0x00000011, "V_CVT_F32_UBYTE0", []>;
//defm V_CVT_F32_UBYTE1 : VOP1_32 <0x00000012, "V_CVT_F32_UBYTE1", []>;
//defm V_CVT_F32_UBYTE2 : VOP1_32 <0x00000013, "V_CVT_F32_UBYTE2", []>;
//defm V_CVT_F32_UBYTE3 : VOP1_32 <0x00000014, "V_CVT_F32_UBYTE3", []>;
//defm V_CVT_U32_F64 : VOP1_32 <0x00000015, "V_CVT_U32_F64", []>;
//defm V_CVT_F64_U32 : VOP1_64 <0x00000016, "V_CVT_F64_U32", []>;
defm V_FRACT_F32 : VOP1_32 <0x00000020, "V_FRACT_F32",
  [(set VReg_32:$dst, (AMDGPUfract AllReg_32:$src0))]
>;
defm V_TRUNC_F32 : VOP1_32 <0x00000021, "V_TRUNC_F32", []>;
defm V_CEIL_F32 : VOP1_32 <0x00000022, "V_CEIL_F32", []>;
defm V_RNDNE_F32 : VOP1_32 <0x00000023, "V_RNDNE_F32",
  [(set VReg_32:$dst, (frint AllReg_32:$src0))]
>;
defm V_FLOOR_F32 : VOP1_32 <0x00000024, "V_FLOOR_F32",
  [(set VReg_32:$dst, (ffloor AllReg_32:$src0))]
>;
defm V_EXP_F32 : VOP1_32 <0x00000025, "V_EXP_F32",
  [(set VReg_32:$dst, (fexp2 AllReg_32:$src0))]
>;
defm V_LOG_CLAMP_F32 : VOP1_32 <0x00000026, "V_LOG_CLAMP_F32", []>;
defm V_LOG_F32 : VOP1_32 <0x00000027, "V_LOG_F32", []>;
defm V_RCP_CLAMP_F32 : VOP1_32 <0x00000028, "V_RCP_CLAMP_F32", []>;
defm V_RCP_LEGACY_F32 : VOP1_32 <0x00000029, "V_RCP_LEGACY_F32", []>;
defm V_RCP_F32 : VOP1_32 <0x0000002a, "V_RCP_F32",
  [(set VReg_32:$dst, (fdiv FP_ONE, AllReg_32:$src0))]
>;
defm V_RCP_IFLAG_F32 : VOP1_32 <0x0000002b, "V_RCP_IFLAG_F32", []>;
defm V_RSQ_CLAMP_F32 : VOP1_32 <0x0000002c, "V_RSQ_CLAMP_F32", []>;
defm V_RSQ_LEGACY_F32 : VOP1_32 <
  0x0000002d, "V_RSQ_LEGACY_F32",
  [(set VReg_32:$dst, (int_AMDGPU_rsq AllReg_32:$src0))]
>;
defm V_RSQ_F32 : VOP1_32 <0x0000002e, "V_RSQ_F32", []>;
defm V_RCP_F64 : VOP1_64 <0x0000002f, "V_RCP_F64", []>;
defm V_RCP_CLAMP_F64 : VOP1_64 <0x00000030, "V_RCP_CLAMP_F64", []>;
defm V_RSQ_F64 : VOP1_64 <0x00000031, "V_RSQ_F64", []>;
defm V_RSQ_CLAMP_F64 : VOP1_64 <0x00000032, "V_RSQ_CLAMP_F64", []>;
defm V_SQRT_F32 : VOP1_32 <0x00000033, "V_SQRT_F32", []>;
defm V_SQRT_F64 : VOP1_64 <0x00000034, "V_SQRT_F64", []>;
defm V_SIN_F32 : VOP1_32 <0x00000035, "V_SIN_F32", []>;
defm V_COS_F32 : VOP1_32 <0x00000036, "V_COS_F32", []>;
defm V_NOT_B32 : VOP1_32 <0x00000037, "V_NOT_B32", []>;
defm V_BFREV_B32 : VOP1_32 <0x00000038, "V_BFREV_B32", []>;
defm V_FFBH_U32 : VOP1_32 <0x00000039, "V_FFBH_U32", []>;
defm V_FFBL_B32 : VOP1_32 <0x0000003a, "V_FFBL_B32", []>;
defm V_FFBH_I32 : VOP1_32 <0x0000003b, "V_FFBH_I32", []>;
//defm V_FREXP_EXP_I32_F64 : VOP1_32 <0x0000003c, "V_FREXP_EXP_I32_F64", []>;
defm V_FREXP_MANT_F64 : VOP1_64 <0x0000003d, "V_FREXP_MANT_F64", []>;
defm V_FRACT_F64 : VOP1_64 <0x0000003e, "V_FRACT_F64", []>;
//defm V_FREXP_EXP_I32_F32 : VOP1_32 <0x0000003f, "V_FREXP_EXP_I32_F32", []>;
defm V_FREXP_MANT_F32 : VOP1_32 <0x00000040, "V_FREXP_MANT_F32", []>;
//def V_CLREXCP : VOP1_ <0x00000041, "V_CLREXCP", []>;
defm V_MOVRELD_B32 : VOP1_32 <0x00000042, "V_MOVRELD_B32", []>;
defm V_MOVRELS_B32 : VOP1_32 <0x00000043, "V_MOVRELS_B32", []>;
defm V_MOVRELSD_B32 : VOP1_32 <0x00000044, "V_MOVRELSD_B32", []>;

def V_INTERP_P1_F32 : VINTRP <
  0x00000000,
  (outs VReg_32:$dst),
  (ins VReg_32:$i, i32imm:$attr_chan, i32imm:$attr, M0Reg:$m0),
  "V_INTERP_P1_F32",
  []> {
  let DisableEncoding = "$m0";
}

def V_INTERP_P2_F32 : VINTRP <
  0x00000001,
  (outs VReg_32:$dst),
  (ins VReg_32:$src0, VReg_32:$j, i32imm:$attr_chan, i32imm:$attr, M0Reg:$m0),
  "V_INTERP_P2_F32",
  []> {

  let Constraints = "$src0 = $dst";
  let DisableEncoding = "$src0,$m0";

}

def V_INTERP_MOV_F32 : VINTRP <
  0x00000002,
  (outs VReg_32:$dst),
  (ins i32imm:$attr_chan, i32imm:$attr, M0Reg:$m0),
  "V_INTERP_MOV_F32",
  []> {
  let VSRC = 0;
  let DisableEncoding = "$m0";
}

//def S_NOP : SOPP_ <0x00000000, "S_NOP", []>;

let isTerminator = 1 in {

def S_ENDPGM : SOPP <0x00000001, (ins), "S_ENDPGM",
  [(IL_retflag)]> {
  let SIMM16 = 0;
  let isBarrier = 1;
  let hasCtrlDep = 1;
}

let isBranch = 1 in {
def S_BRANCH : SOPP <
  0x00000002, (ins brtarget:$target), "S_BRANCH",
  []
>;

let DisableEncoding = "$scc" in {
def S_CBRANCH_SCC0 : SOPP <
  0x00000004, (ins brtarget:$target, SCCReg:$scc),
  "S_CBRANCH_SCC0", []
>;
def S_CBRANCH_SCC1 : SOPP <
  0x00000005, (ins brtarget:$target, SCCReg:$scc),
  "S_CBRANCH_SCC1",
  []
>;
} // End DisableEncoding = "$scc"

def S_CBRANCH_VCCZ : SOPP <
  0x00000006, (ins brtarget:$target, VCCReg:$vcc),
  "S_CBRANCH_VCCZ",
  []
>;
def S_CBRANCH_VCCNZ : SOPP <
  0x00000007, (ins brtarget:$target, VCCReg:$vcc),
  "S_CBRANCH_VCCNZ",
  []
>;

let DisableEncoding = "$exec" in {
def S_CBRANCH_EXECZ : SOPP <
  0x00000008, (ins brtarget:$target, EXECReg:$exec),
  "S_CBRANCH_EXECZ",
  []
>;
def S_CBRANCH_EXECNZ : SOPP <
  0x00000009, (ins brtarget:$target, EXECReg:$exec),
  "S_CBRANCH_EXECNZ",
  []
>;
} // End DisableEncoding = "$exec"


} // End isBranch = 1
} // End isTerminator = 1

//def S_BARRIER : SOPP_ <0x0000000a, "S_BARRIER", []>;
let hasSideEffects = 1 in {
def S_WAITCNT : SOPP <0x0000000c, (ins i32imm:$simm16), "S_WAITCNT $simm16",
  []
>;
} // End hasSideEffects
//def S_SETHALT : SOPP_ <0x0000000d, "S_SETHALT", []>;
//def S_SLEEP : SOPP_ <0x0000000e, "S_SLEEP", []>;
//def S_SETPRIO : SOPP_ <0x0000000f, "S_SETPRIO", []>;
//def S_SENDMSG : SOPP_ <0x00000010, "S_SENDMSG", []>;
//def S_SENDMSGHALT : SOPP_ <0x00000011, "S_SENDMSGHALT", []>;
//def S_TRAP : SOPP_ <0x00000012, "S_TRAP", []>;
//def S_ICACHE_INV : SOPP_ <0x00000013, "S_ICACHE_INV", []>;
//def S_INCPERFLEVEL : SOPP_ <0x00000014, "S_INCPERFLEVEL", []>;
//def S_DECPERFLEVEL : SOPP_ <0x00000015, "S_DECPERFLEVEL", []>;
//def S_TTRACEDATA : SOPP_ <0x00000016, "S_TTRACEDATA", []>;

| 759 | def V_CNDMASK_B32_e32 : VOP2 <0x00000000, (outs VReg_32:$dst), |
| 760 | (ins AllReg_32:$src0, VReg_32:$src1, VCCReg:$vcc), "V_CNDMASK_B32_e32", |
| 761 | [] |
| 762 | >{ |
| 763 | let DisableEncoding = "$vcc"; |
| 764 | } |
| 765 | |
| 766 | def V_CNDMASK_B32_e64 : VOP3 <0x00000100, (outs VReg_32:$dst), |
| 767 | (ins VReg_32:$src0, VReg_32:$src1, SReg_1:$src2, InstFlag:$abs, InstFlag:$clamp, InstFlag:$omod, InstFlag:$neg), |
| 768 | "V_CNDMASK_B32_e64", |
| 769 | [(set (i32 VReg_32:$dst), (select SReg_1:$src2, VReg_32:$src1, VReg_32:$src0))] |
| 770 | >; |
| 771 | |
| 772 | //f32 pattern for V_CNDMASK_B32_e64 |
| 773 | def : Pat < |
| 774 | (f32 (select SReg_1:$src2, VReg_32:$src1, VReg_32:$src0)), |
| 775 | (V_CNDMASK_B32_e64 VReg_32:$src0, VReg_32:$src1, SReg_1:$src2) |
| 776 | >; |
| 777 | |
| 778 | defm V_READLANE_B32 : VOP2_32 <0x00000001, "V_READLANE_B32", []>; |
| 779 | defm V_WRITELANE_B32 : VOP2_32 <0x00000002, "V_WRITELANE_B32", []>; |
| 780 | |
| 781 | defm V_ADD_F32 : VOP2_32 <0x00000003, "V_ADD_F32", []>; |
| 782 | def : Pat < |
| 783 | (f32 (fadd AllReg_32:$src0, VReg_32:$src1)), |
| 784 | (V_ADD_F32_e32 AllReg_32:$src0, VReg_32:$src1) |
| 785 | >; |
| 786 | |
| 787 | defm V_SUB_F32 : VOP2_32 <0x00000004, "V_SUB_F32", []>; |
| 788 | def : Pat < |
| 789 | (f32 (fsub AllReg_32:$src0, VReg_32:$src1)), |
| 790 | (V_SUB_F32_e32 AllReg_32:$src0, VReg_32:$src1) |
| 791 | >; |
| 792 | defm V_SUBREV_F32 : VOP2_32 <0x00000005, "V_SUBREV_F32", []>; |
| 793 | defm V_MAC_LEGACY_F32 : VOP2_32 <0x00000006, "V_MAC_LEGACY_F32", []>; |
| 794 | defm V_MUL_LEGACY_F32 : VOP2_32 < |
| 795 | 0x00000007, "V_MUL_LEGACY_F32", |
| 796 | [(set VReg_32:$dst, (int_AMDGPU_mul AllReg_32:$src0, VReg_32:$src1))] |
| 797 | >; |
| 798 | |
| 799 | defm V_MUL_F32 : VOP2_32 <0x00000008, "V_MUL_F32", |
| 800 | [(set VReg_32:$dst, (fmul AllReg_32:$src0, VReg_32:$src1))] |
| 801 | >; |
| 802 | //defm V_MUL_I32_I24 : VOP2_32 <0x00000009, "V_MUL_I32_I24", []>; |
| 803 | //defm V_MUL_HI_I32_I24 : VOP2_32 <0x0000000a, "V_MUL_HI_I32_I24", []>; |
| 804 | //defm V_MUL_U32_U24 : VOP2_32 <0x0000000b, "V_MUL_U32_U24", []>; |
| 805 | //defm V_MUL_HI_U32_U24 : VOP2_32 <0x0000000c, "V_MUL_HI_U32_U24", []>; |
| 806 | defm V_MIN_LEGACY_F32 : VOP2_32 <0x0000000d, "V_MIN_LEGACY_F32", |
| 807 | [(set VReg_32:$dst, (AMDGPUfmin AllReg_32:$src0, VReg_32:$src1))] |
| 808 | >; |
| 809 | |
| 810 | defm V_MAX_LEGACY_F32 : VOP2_32 <0x0000000e, "V_MAX_LEGACY_F32", |
| 811 | [(set VReg_32:$dst, (AMDGPUfmax AllReg_32:$src0, VReg_32:$src1))] |
| 812 | >; |
| 813 | defm V_MIN_F32 : VOP2_32 <0x0000000f, "V_MIN_F32", []>; |
| 814 | defm V_MAX_F32 : VOP2_32 <0x00000010, "V_MAX_F32", []>; |
| 815 | defm V_MIN_I32 : VOP2_32 <0x00000011, "V_MIN_I32", []>; |
| 816 | defm V_MAX_I32 : VOP2_32 <0x00000012, "V_MAX_I32", []>; |
| 817 | defm V_MIN_U32 : VOP2_32 <0x00000013, "V_MIN_U32", []>; |
| 818 | defm V_MAX_U32 : VOP2_32 <0x00000014, "V_MAX_U32", []>; |
| 819 | defm V_LSHR_B32 : VOP2_32 <0x00000015, "V_LSHR_B32", []>; |
| 820 | defm V_LSHRREV_B32 : VOP2_32 <0x00000016, "V_LSHRREV_B32", []>; |
| 821 | defm V_ASHR_I32 : VOP2_32 <0x00000017, "V_ASHR_I32", []>; |
| 822 | defm V_ASHRREV_I32 : VOP2_32 <0x00000018, "V_ASHRREV_I32", []>; |
| 823 | defm V_LSHL_B32 : VOP2_32 <0x00000019, "V_LSHL_B32", []>; |
| 824 | defm V_LSHLREV_B32 : VOP2_32 <0x0000001a, "V_LSHLREV_B32", []>; |
| 825 | defm V_AND_B32 : VOP2_32 <0x0000001b, "V_AND_B32", |
| 826 | [(set VReg_32:$dst, (and AllReg_32:$src0, VReg_32:$src1))] |
| 827 | >; |
| 828 | defm V_OR_B32 : VOP2_32 <0x0000001c, "V_OR_B32", |
| 829 | [(set VReg_32:$dst, (or AllReg_32:$src0, VReg_32:$src1))] |
| 830 | >; |
| 831 | defm V_XOR_B32 : VOP2_32 <0x0000001d, "V_XOR_B32", |
| 832 | [(set VReg_32:$dst, (xor AllReg_32:$src0, VReg_32:$src1))] |
| 833 | >; |
| 834 | defm V_BFM_B32 : VOP2_32 <0x0000001e, "V_BFM_B32", []>; |
| 835 | defm V_MAC_F32 : VOP2_32 <0x0000001f, "V_MAC_F32", []>; |
| 836 | defm V_MADMK_F32 : VOP2_32 <0x00000020, "V_MADMK_F32", []>; |
| 837 | defm V_MADAK_F32 : VOP2_32 <0x00000021, "V_MADAK_F32", []>; |
| 838 | //defm V_BCNT_U32_B32 : VOP2_32 <0x00000022, "V_BCNT_U32_B32", []>; |
| 839 | //defm V_MBCNT_LO_U32_B32 : VOP2_32 <0x00000023, "V_MBCNT_LO_U32_B32", []>; |
| 840 | //defm V_MBCNT_HI_U32_B32 : VOP2_32 <0x00000024, "V_MBCNT_HI_U32_B32", []>; |
| 841 | let Defs = [VCC] in { // Carry-out goes to VCC |
| 842 | defm V_ADD_I32 : VOP2_32 <0x00000025, "V_ADD_I32", |
| 843 | [(set VReg_32:$dst, (add (i32 AllReg_32:$src0), (i32 VReg_32:$src1)))] |
| 844 | >; |
| 845 | defm V_SUB_I32 : VOP2_32 <0x00000026, "V_SUB_I32", |
| 846 | [(set VReg_32:$dst, (sub (i32 AllReg_32:$src0), (i32 VReg_32:$src1)))] |
| 847 | >; |
| 848 | } // End Defs = [VCC] |
| 849 | defm V_SUBREV_I32 : VOP2_32 <0x00000027, "V_SUBREV_I32", []>; |
| 850 | defm V_ADDC_U32 : VOP2_32 <0x00000028, "V_ADDC_U32", []>; |
| 851 | defm V_SUBB_U32 : VOP2_32 <0x00000029, "V_SUBB_U32", []>; |
| 852 | defm V_SUBBREV_U32 : VOP2_32 <0x0000002a, "V_SUBBREV_U32", []>; |
| 853 | defm V_LDEXP_F32 : VOP2_32 <0x0000002b, "V_LDEXP_F32", []>; |
////def V_CVT_PKACCUM_U8_F32 : VOP2_U8 <0x0000002c, "V_CVT_PKACCUM_U8_F32", []>;
////def V_CVT_PKNORM_I16_F32 : VOP2_I16 <0x0000002d, "V_CVT_PKNORM_I16_F32", []>;
////def V_CVT_PKNORM_U16_F32 : VOP2_U16 <0x0000002e, "V_CVT_PKNORM_U16_F32", []>;
defm V_CVT_PKRTZ_F16_F32 : VOP2_32 <0x0000002f, "V_CVT_PKRTZ_F16_F32",
  [(set VReg_32:$dst, (int_SI_packf16 AllReg_32:$src0, VReg_32:$src1))]
>;
////def V_CVT_PK_U16_U32 : VOP2_U16 <0x00000030, "V_CVT_PK_U16_U32", []>;
////def V_CVT_PK_I16_I32 : VOP2_I16 <0x00000031, "V_CVT_PK_I16_I32", []>;
def S_CMP_EQ_I32 : SOPC_32 <0x00000000, "S_CMP_EQ_I32", []>;
def S_CMP_LG_I32 : SOPC_32 <0x00000001, "S_CMP_LG_I32", []>;
def S_CMP_GT_I32 : SOPC_32 <0x00000002, "S_CMP_GT_I32", []>;
def S_CMP_GE_I32 : SOPC_32 <0x00000003, "S_CMP_GE_I32", []>;
def S_CMP_LT_I32 : SOPC_32 <0x00000004, "S_CMP_LT_I32", []>;
def S_CMP_LE_I32 : SOPC_32 <0x00000005, "S_CMP_LE_I32", []>;
def S_CMP_EQ_U32 : SOPC_32 <0x00000006, "S_CMP_EQ_U32", []>;
def S_CMP_LG_U32 : SOPC_32 <0x00000007, "S_CMP_LG_U32", []>;
def S_CMP_GT_U32 : SOPC_32 <0x00000008, "S_CMP_GT_U32", []>;
def S_CMP_GE_U32 : SOPC_32 <0x00000009, "S_CMP_GE_U32", []>;
def S_CMP_LT_U32 : SOPC_32 <0x0000000a, "S_CMP_LT_U32", []>;
def S_CMP_LE_U32 : SOPC_32 <0x0000000b, "S_CMP_LE_U32", []>;
////def S_BITCMP0_B32 : SOPC_BITCMP0 <0x0000000c, "S_BITCMP0_B32", []>;
////def S_BITCMP1_B32 : SOPC_BITCMP1 <0x0000000d, "S_BITCMP1_B32", []>;
////def S_BITCMP0_B64 : SOPC_BITCMP0 <0x0000000e, "S_BITCMP0_B64", []>;
////def S_BITCMP1_B64 : SOPC_BITCMP1 <0x0000000f, "S_BITCMP1_B64", []>;
//def S_SETVSKIP : SOPC_ <0x00000010, "S_SETVSKIP", []>;

let neverHasSideEffects = 1 in {

def V_MAD_LEGACY_F32 : VOP3_32 <0x00000140, "V_MAD_LEGACY_F32", []>;
def V_MAD_F32 : VOP3_32 <0x00000141, "V_MAD_F32", []>;
//def V_MAD_I32_I24 : VOP3_32 <0x00000142, "V_MAD_I32_I24", []>;
//def V_MAD_U32_U24 : VOP3_32 <0x00000143, "V_MAD_U32_U24", []>;

} // End neverHasSideEffects = 1
def V_CUBEID_F32 : VOP3_32 <0x00000144, "V_CUBEID_F32", []>;
def V_CUBESC_F32 : VOP3_32 <0x00000145, "V_CUBESC_F32", []>;
def V_CUBETC_F32 : VOP3_32 <0x00000146, "V_CUBETC_F32", []>;
def V_CUBEMA_F32 : VOP3_32 <0x00000147, "V_CUBEMA_F32", []>;
def V_BFE_U32 : VOP3_32 <0x00000148, "V_BFE_U32", []>;
def V_BFE_I32 : VOP3_32 <0x00000149, "V_BFE_I32", []>;
def V_BFI_B32 : VOP3_32 <0x0000014a, "V_BFI_B32", []>;
def V_FMA_F32 : VOP3_32 <0x0000014b, "V_FMA_F32", []>;
def V_FMA_F64 : VOP3_64 <0x0000014c, "V_FMA_F64", []>;
//def V_LERP_U8 : VOP3_U8 <0x0000014d, "V_LERP_U8", []>;
def V_ALIGNBIT_B32 : VOP3_32 <0x0000014e, "V_ALIGNBIT_B32", []>;
def V_ALIGNBYTE_B32 : VOP3_32 <0x0000014f, "V_ALIGNBYTE_B32", []>;
def V_MULLIT_F32 : VOP3_32 <0x00000150, "V_MULLIT_F32", []>;
////def V_MIN3_F32 : VOP3_MIN3 <0x00000151, "V_MIN3_F32", []>;
////def V_MIN3_I32 : VOP3_MIN3 <0x00000152, "V_MIN3_I32", []>;
////def V_MIN3_U32 : VOP3_MIN3 <0x00000153, "V_MIN3_U32", []>;
////def V_MAX3_F32 : VOP3_MAX3 <0x00000154, "V_MAX3_F32", []>;
////def V_MAX3_I32 : VOP3_MAX3 <0x00000155, "V_MAX3_I32", []>;
////def V_MAX3_U32 : VOP3_MAX3 <0x00000156, "V_MAX3_U32", []>;
////def V_MED3_F32 : VOP3_MED3 <0x00000157, "V_MED3_F32", []>;
////def V_MED3_I32 : VOP3_MED3 <0x00000158, "V_MED3_I32", []>;
////def V_MED3_U32 : VOP3_MED3 <0x00000159, "V_MED3_U32", []>;
//def V_SAD_U8 : VOP3_U8 <0x0000015a, "V_SAD_U8", []>;
//def V_SAD_HI_U8 : VOP3_U8 <0x0000015b, "V_SAD_HI_U8", []>;
//def V_SAD_U16 : VOP3_U16 <0x0000015c, "V_SAD_U16", []>;
def V_SAD_U32 : VOP3_32 <0x0000015d, "V_SAD_U32", []>;
////def V_CVT_PK_U8_F32 : VOP3_U8 <0x0000015e, "V_CVT_PK_U8_F32", []>;
def V_DIV_FIXUP_F32 : VOP3_32 <0x0000015f, "V_DIV_FIXUP_F32", []>;
def V_DIV_FIXUP_F64 : VOP3_64 <0x00000160, "V_DIV_FIXUP_F64", []>;
def V_LSHL_B64 : VOP3_64 <0x00000161, "V_LSHL_B64", []>;
def V_LSHR_B64 : VOP3_64 <0x00000162, "V_LSHR_B64", []>;
def V_ASHR_I64 : VOP3_64 <0x00000163, "V_ASHR_I64", []>;
def V_ADD_F64 : VOP3_64 <0x00000164, "V_ADD_F64", []>;
def V_MUL_F64 : VOP3_64 <0x00000165, "V_MUL_F64", []>;
def V_MIN_F64 : VOP3_64 <0x00000166, "V_MIN_F64", []>;
def V_MAX_F64 : VOP3_64 <0x00000167, "V_MAX_F64", []>;
def V_LDEXP_F64 : VOP3_64 <0x00000168, "V_LDEXP_F64", []>;
def V_MUL_LO_U32 : VOP3_32 <0x00000169, "V_MUL_LO_U32", []>;
def V_MUL_HI_U32 : VOP3_32 <0x0000016a, "V_MUL_HI_U32", []>;
def V_MUL_LO_I32 : VOP3_32 <0x0000016b, "V_MUL_LO_I32", []>;
def V_MUL_HI_I32 : VOP3_32 <0x0000016c, "V_MUL_HI_I32", []>;
def V_DIV_SCALE_F32 : VOP3_32 <0x0000016d, "V_DIV_SCALE_F32", []>;
def V_DIV_SCALE_F64 : VOP3_64 <0x0000016e, "V_DIV_SCALE_F64", []>;
def V_DIV_FMAS_F32 : VOP3_32 <0x0000016f, "V_DIV_FMAS_F32", []>;
def V_DIV_FMAS_F64 : VOP3_64 <0x00000170, "V_DIV_FMAS_F64", []>;
//def V_MSAD_U8 : VOP3_U8 <0x00000171, "V_MSAD_U8", []>;
//def V_QSAD_U8 : VOP3_U8 <0x00000172, "V_QSAD_U8", []>;
//def V_MQSAD_U8 : VOP3_U8 <0x00000173, "V_MQSAD_U8", []>;
def V_TRIG_PREOP_F64 : VOP3_64 <0x00000174, "V_TRIG_PREOP_F64", []>;
def S_ADD_U32 : SOP2_32 <0x00000000, "S_ADD_U32", []>;
def S_SUB_U32 : SOP2_32 <0x00000001, "S_SUB_U32", []>;
def S_ADD_I32 : SOP2_32 <0x00000002, "S_ADD_I32", []>;
def S_SUB_I32 : SOP2_32 <0x00000003, "S_SUB_I32", []>;
def S_ADDC_U32 : SOP2_32 <0x00000004, "S_ADDC_U32", []>;
def S_SUBB_U32 : SOP2_32 <0x00000005, "S_SUBB_U32", []>;
def S_MIN_I32 : SOP2_32 <0x00000006, "S_MIN_I32", []>;
def S_MIN_U32 : SOP2_32 <0x00000007, "S_MIN_U32", []>;
def S_MAX_I32 : SOP2_32 <0x00000008, "S_MAX_I32", []>;
def S_MAX_U32 : SOP2_32 <0x00000009, "S_MAX_U32", []>;

def S_CSELECT_B32 : SOP2 <
  0x0000000a, (outs SReg_32:$dst),
  (ins SReg_32:$src0, SReg_32:$src1, SCCReg:$scc), "S_CSELECT_B32",
  [(set (i32 SReg_32:$dst), (select SCCReg:$scc, SReg_32:$src0, SReg_32:$src1))]
>;

def S_CSELECT_B64 : SOP2_64 <0x0000000b, "S_CSELECT_B64", []>;

// f32 pattern for S_CSELECT_B32: the def above only covers the i32 case, so
// f32 selects on SCC need their own pattern.
def : Pat <
  (f32 (select SCCReg:$scc, SReg_32:$src0, SReg_32:$src1)),
  (S_CSELECT_B32 SReg_32:$src0, SReg_32:$src1, SCCReg:$scc)
>;
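
// For illustration only (an assumption about how selection plays out, not
// part of the original source): IR such as
//   %c = icmp eq i32 %a, %b
//   %r = select i1 %c, i32 %x, i32 %y
// is expected to select to an S_CMP_EQ_I32 that defines SCC, followed by an
// S_CSELECT_B32 that reads it.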

def S_AND_B32 : SOP2_32 <0x0000000e, "S_AND_B32", []>;

def S_AND_B64 : SOP2_64 <0x0000000f, "S_AND_B64",
  [(set SReg_64:$dst, (and SReg_64:$src0, SReg_64:$src1))]
>;
def S_AND_VCC : SOP2_VCC <0x0000000f, "S_AND_B64",
  [(set SReg_1:$vcc, (SIvcc_and SReg_64:$src0, SReg_64:$src1))]
>;
def S_OR_B32 : SOP2_32 <0x00000010, "S_OR_B32", []>;
def S_OR_B64 : SOP2_64 <0x00000011, "S_OR_B64", []>;
def S_XOR_B32 : SOP2_32 <0x00000012, "S_XOR_B32", []>;
def S_XOR_B64 : SOP2_64 <0x00000013, "S_XOR_B64", []>;
def S_ANDN2_B32 : SOP2_32 <0x00000014, "S_ANDN2_B32", []>;
def S_ANDN2_B64 : SOP2_64 <0x00000015, "S_ANDN2_B64", []>;
def S_ORN2_B32 : SOP2_32 <0x00000016, "S_ORN2_B32", []>;
def S_ORN2_B64 : SOP2_64 <0x00000017, "S_ORN2_B64", []>;
def S_NAND_B32 : SOP2_32 <0x00000018, "S_NAND_B32", []>;
def S_NAND_B64 : SOP2_64 <0x00000019, "S_NAND_B64", []>;
def S_NOR_B32 : SOP2_32 <0x0000001a, "S_NOR_B32", []>;
def S_NOR_B64 : SOP2_64 <0x0000001b, "S_NOR_B64", []>;
def S_XNOR_B32 : SOP2_32 <0x0000001c, "S_XNOR_B32", []>;
def S_XNOR_B64 : SOP2_64 <0x0000001d, "S_XNOR_B64", []>;
def S_LSHL_B32 : SOP2_32 <0x0000001e, "S_LSHL_B32", []>;
def S_LSHL_B64 : SOP2_64 <0x0000001f, "S_LSHL_B64", []>;
def S_LSHR_B32 : SOP2_32 <0x00000020, "S_LSHR_B32", []>;
def S_LSHR_B64 : SOP2_64 <0x00000021, "S_LSHR_B64", []>;
def S_ASHR_I32 : SOP2_32 <0x00000022, "S_ASHR_I32", []>;
def S_ASHR_I64 : SOP2_64 <0x00000023, "S_ASHR_I64", []>;
def S_BFM_B32 : SOP2_32 <0x00000024, "S_BFM_B32", []>;
def S_BFM_B64 : SOP2_64 <0x00000025, "S_BFM_B64", []>;
def S_MUL_I32 : SOP2_32 <0x00000026, "S_MUL_I32", []>;
def S_BFE_U32 : SOP2_32 <0x00000027, "S_BFE_U32", []>;
def S_BFE_I32 : SOP2_32 <0x00000028, "S_BFE_I32", []>;
def S_BFE_U64 : SOP2_64 <0x00000029, "S_BFE_U64", []>;
def S_BFE_I64 : SOP2_64 <0x0000002a, "S_BFE_I64", []>;
//def S_CBRANCH_G_FORK : SOP2_ <0x0000002b, "S_CBRANCH_G_FORK", []>;
def S_ABSDIFF_I32 : SOP2_32 <0x0000002c, "S_ABSDIFF_I32", []>;

class V_MOV_IMM <Operand immType, SDNode immNode> : InstSI <
  (outs VReg_32:$dst),
  (ins immType:$src0),
  "V_MOV_IMM",
  [(set VReg_32:$dst, (immNode:$src0))]
>;

let isCodeGenOnly = 1, isPseudo = 1 in {

def V_MOV_IMM_I32 : V_MOV_IMM<i32imm, imm>;
def V_MOV_IMM_F32 : V_MOV_IMM<f32imm, fpimm>;

def S_MOV_IMM_I32 : InstSI <
  (outs SReg_32:$dst),
  (ins i32imm:$src0),
  "S_MOV_IMM_I32",
  [(set SReg_32:$dst, (imm:$src0))]
>;

// i64 immediates aren't really supported in hardware, but LLVM will use the
// i64 type for indices on load and store instructions.  The pattern for
// S_MOV_IMM_I64 will only match i64 immediates that fit into 32 bits, which
// the hardware can handle.
def S_MOV_IMM_I64 : InstSI <
  (outs SReg_64:$dst),
  (ins i64imm:$src0),
  "S_MOV_IMM_I64 $dst, $src0",
  [(set SReg_64:$dst, (IMM32bitIn64bit:$src0))]
>;
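
// A minimal sketch of the kind of predicate assumed above (IMM32bitIn64bit
// itself is defined elsewhere in the backend; the exact signedness check is
// an assumption here, so the sketch is left commented out):
//
//   def IMM32bitIn64bit : PatLeaf <(imm), [{
//     // Match only i64 immediates whose value fits in 32 bits.
//     return isInt<32>(N->getSExtValue());
//   }]>;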

} // End isCodeGenOnly, isPseudo = 1

class SI_LOAD_LITERAL<Operand ImmType> :
    Enc32 <(outs), (ins ImmType:$imm), "LOAD_LITERAL $imm", []> {

  bits<32> imm;
  let Inst{31-0} = imm;
}

def SI_LOAD_LITERAL_I32 : SI_LOAD_LITERAL<i32imm>;
def SI_LOAD_LITERAL_F32 : SI_LOAD_LITERAL<f32imm>;
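
// Added note (based on our understanding of how SI handles literals): a
// 32-bit literal constant is encoded as an extra dword following the
// instruction in the stream, which is why this helper is a bare Enc32 whose
// entire encoding is the immediate itself.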

let isCodeGenOnly = 1, isPseudo = 1 in {

def SET_M0 : InstSI <
  (outs SReg_32:$dst),
  (ins i32imm:$src0),
  "SET_M0",
  [(set SReg_32:$dst, (int_SI_set_M0 imm:$src0))]
>;

def LOAD_CONST : AMDGPUShaderInst <
  (outs GPRF32:$dst),
  (ins i32imm:$src),
  "LOAD_CONST $dst, $src",
  [(set GPRF32:$dst, (int_AMDGPU_load_const imm:$src))]
>;

let usesCustomInserter = 1 in {

def SI_V_CNDLT : InstSI <
  (outs VReg_32:$dst),
  (ins VReg_32:$src0, VReg_32:$src1, VReg_32:$src2),
  "SI_V_CNDLT $dst, $src0, $src1, $src2",
  [(set VReg_32:$dst, (int_AMDGPU_cndlt VReg_32:$src0, VReg_32:$src1, VReg_32:$src2))]
>;

def SI_INTERP : InstSI <
  (outs VReg_32:$dst),
  (ins VReg_32:$i, VReg_32:$j, i32imm:$attr_chan, i32imm:$attr, SReg_32:$params),
  "SI_INTERP $dst, $i, $j, $attr_chan, $attr, $params",
  []
>;

def SI_INTERP_CONST : InstSI <
  (outs VReg_32:$dst),
  (ins i32imm:$attr_chan, i32imm:$attr, SReg_32:$params),
  "SI_INTERP_CONST $dst, $attr_chan, $attr, $params",
  [(set VReg_32:$dst, (int_SI_fs_interp_constant imm:$attr_chan,
                                                  imm:$attr, SReg_32:$params))]
>;

def SI_KIL : InstSI <
  (outs),
  (ins VReg_32:$src),
  "SI_KIL $src",
  [(int_AMDGPU_kill VReg_32:$src)]
>;

def SI_WQM : InstSI <
  (outs),
  (ins),
  "SI_WQM",
  [(int_SI_wqm)]
>;

} // end usesCustomInserter

// SI pseudo branch instructions.  These are used by the CFG structurizer pass
// and should be lowered to ISA instructions prior to codegen.

let isBranch = 1, isTerminator = 1, mayLoad = 0, mayStore = 0,
    hasSideEffects = 0 in {
def SI_IF_NZ : InstSI <
  (outs),
  (ins brtarget:$target, SReg_1:$vcc),
  "SI_BRANCH_NZ",
  [(IL_brcond bb:$target, SReg_1:$vcc)]
>;

def SI_IF_Z : InstSI <
  (outs),
  (ins brtarget:$target, SReg_1:$vcc),
  "SI_BRANCH_Z",
  []
>;
} // end isBranch = 1, isTerminator = 1, mayLoad = 0, mayStore = 0,
  // hasSideEffects = 0
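
// Added note (our assumption about the intended lowering, not stated in the
// original source): after structurization these conditional branches would
// presumably be rewritten in terms of EXEC-mask updates plus the hardware's
// S_CBRANCH_* scalar branch instructions.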
} // end isCodeGenOnly, isPseudo

/* int_SI_vs_load_input */
def : Pat<
  (int_SI_vs_load_input SReg_128:$tlst, IMM12bit:$attr_offset,
                        VReg_32:$buf_idx_vgpr),
  (BUFFER_LOAD_FORMAT_XYZW imm:$attr_offset, 0, 1, 0, 0, 0,
                           VReg_32:$buf_idx_vgpr, SReg_128:$tlst,
                           0, 0, (i32 SREG_LIT_0))
>;
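
// Added note: the bare 0/1 operands fill BUFFER_LOAD_FORMAT_XYZW's flag
// fields; reading them against the MUBUF operand order (our assumption),
// the 1 is idxen, so $buf_idx_vgpr is treated as an index into the resource
// described by $tlst.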

/* int_SI_export */
def : Pat <
  (int_SI_export imm:$en, imm:$vm, imm:$done, imm:$tgt, imm:$compr,
                 VReg_32:$src0, VReg_32:$src1, VReg_32:$src2, VReg_32:$src3),
  (EXP imm:$en, imm:$tgt, imm:$compr, imm:$done, imm:$vm,
       VReg_32:$src0, VReg_32:$src1, VReg_32:$src2, VReg_32:$src3)
>;
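
// Note the operand reordering above: the intrinsic takes (en, vm, done, tgt,
// compr, ...) while the EXP instruction expects (en, tgt, compr, done, vm,
// ...), so the pattern permutes them accordingly.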

/* int_SI_sample */
def : Pat <
  (int_SI_sample imm:$writemask, VReg_128:$coord, SReg_256:$rsrc, SReg_128:$sampler),
  (IMAGE_SAMPLE imm:$writemask, 0, 0, 0, 0, 0, 0, 0, VReg_128:$coord,
                SReg_256:$rsrc, SReg_128:$sampler)
>;

/* int_SI_sample_lod */
def : Pat <
  (int_SI_sample_lod imm:$writemask, VReg_128:$coord, SReg_256:$rsrc, SReg_128:$sampler),
  (IMAGE_SAMPLE_L imm:$writemask, 0, 0, 0, 0, 0, 0, 0, VReg_128:$coord,
                  SReg_256:$rsrc, SReg_128:$sampler)
>;

/* int_SI_sample_bias */
def : Pat <
  (int_SI_sample_bias imm:$writemask, VReg_128:$coord, SReg_256:$rsrc, SReg_128:$sampler),
  (IMAGE_SAMPLE_B imm:$writemask, 0, 0, 0, 0, 0, 0, 0, VReg_128:$coord,
                  SReg_256:$rsrc, SReg_128:$sampler)
>;

def CLAMP_SI : CLAMP<VReg_32>;
def FABS_SI : FABS<VReg_32>;
def FNEG_SI : FNEG<VReg_32>;

def : Extract_Element <f32, v4f32, VReg_128, 0, sel_x>;
def : Extract_Element <f32, v4f32, VReg_128, 1, sel_y>;
def : Extract_Element <f32, v4f32, VReg_128, 2, sel_z>;
def : Extract_Element <f32, v4f32, VReg_128, 3, sel_w>;

def : Insert_Element <f32, v4f32, VReg_32, VReg_128, 4, sel_x>;
def : Insert_Element <f32, v4f32, VReg_32, VReg_128, 5, sel_y>;
def : Insert_Element <f32, v4f32, VReg_32, VReg_128, 6, sel_z>;
def : Insert_Element <f32, v4f32, VReg_32, VReg_128, 7, sel_w>;

def : Vector_Build <v4f32, VReg_128, f32, VReg_32>;
def : Vector_Build <v4i32, SReg_128, i32, SReg_32>;

def : BitConvert <i32, f32, SReg_32>;
def : BitConvert <i32, f32, VReg_32>;

def : BitConvert <f32, i32, SReg_32>;
def : BitConvert <f32, i32, VReg_32>;

def : Pat <
  (i64 (SIsreg1_bitcast SReg_1:$vcc)),
  (S_MOV_B64 (COPY_TO_REGCLASS SReg_1:$vcc, SReg_64))
>;

def : Pat <
  (i1 (SIsreg1_bitcast SReg_64:$vcc)),
  (COPY_TO_REGCLASS SReg_64:$vcc, SReg_1)
>;

def : Pat <
  (i64 (SIvcc_bitcast VCCReg:$vcc)),
  (S_MOV_B64 (COPY_TO_REGCLASS VCCReg:$vcc, SReg_64))
>;

def : Pat <
  (i1 (SIvcc_bitcast SReg_64:$vcc)),
  (COPY_TO_REGCLASS SReg_64:$vcc, VCCReg)
>;
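
// Added commentary (our reading of the register classes involved): these
// patterns exist because condition masks are modeled as i1 values in
// SReg_1/VCCReg while the hardware mask is physically a 64-bit SGPR pair, so
// "bitcasting" between i1 and i64 is really a register-class copy, plus an
// S_MOV_B64 when a full 64-bit value is materialized.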

/********** ====================== **********/
/********** Interpolation Patterns **********/
/********** ====================== **********/

def : Pat <
  (int_SI_fs_interp_linear_center imm:$attr_chan, imm:$attr, SReg_32:$params),
  (SI_INTERP (f32 LINEAR_CENTER_I), (f32 LINEAR_CENTER_J), imm:$attr_chan,
             imm:$attr, SReg_32:$params)
>;

def : Pat <
  (int_SI_fs_interp_linear_centroid imm:$attr_chan, imm:$attr, SReg_32:$params),
  (SI_INTERP (f32 LINEAR_CENTROID_I), (f32 LINEAR_CENTROID_J), imm:$attr_chan,
             imm:$attr, SReg_32:$params)
>;

def : Pat <
  (int_SI_fs_interp_persp_center imm:$attr_chan, imm:$attr, SReg_32:$params),
  (SI_INTERP (f32 PERSP_CENTER_I), (f32 PERSP_CENTER_J), imm:$attr_chan,
             imm:$attr, SReg_32:$params)
>;

def : Pat <
  (int_SI_fs_interp_persp_centroid imm:$attr_chan, imm:$attr, SReg_32:$params),
  (SI_INTERP (f32 PERSP_CENTROID_I), (f32 PERSP_CENTROID_J), imm:$attr_chan,
             imm:$attr, SReg_32:$params)
>;

def : Pat <
  (int_SI_fs_read_face),
  (f32 FRONT_FACE)
>;

def : Pat <
  (int_SI_fs_read_pos 0),
  (f32 POS_X_FLOAT)
>;

def : Pat <
  (int_SI_fs_read_pos 1),
  (f32 POS_Y_FLOAT)
>;

def : Pat <
  (int_SI_fs_read_pos 2),
  (f32 POS_Z_FLOAT)
>;

def : Pat <
  (int_SI_fs_read_pos 3),
  (f32 POS_W_FLOAT)
>;

/********** ================== **********/
/********** Intrinsic Patterns **********/
/********** ================== **********/

/* llvm.AMDGPU.pow */
/* XXX: We are using IEEE MUL, not the 0 * anything = 0 MUL, is this correct? */
def : POW_Common <V_LOG_F32_e32, V_EXP_F32_e32, V_MUL_F32_e32, VReg_32>;
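
// Added note: POW_Common expands pow(x, y) as exp2(y * log2(x)), using the
// hardware's base-2 V_LOG_F32 and V_EXP_F32 with the multiply in between.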

def : Pat <
  (int_AMDGPU_div AllReg_32:$src0, AllReg_32:$src1),
  (V_MUL_LEGACY_F32_e32 AllReg_32:$src0, (V_RCP_LEGACY_F32_e32 AllReg_32:$src1))
>;

def : Pat<
  (fdiv AllReg_32:$src0, AllReg_32:$src1),
  (V_MUL_F32_e32 AllReg_32:$src0, (V_RCP_F32_e32 AllReg_32:$src1))
>;
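
// Added note: both divisions above are expanded as x * (1/y) via the
// reciprocal instruction, which is fast but not a correctly rounded IEEE
// division.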

def : Pat <
  (int_AMDGPU_kilp),
  (SI_KIL (V_MOV_IMM_I32 0xbf800000))
>;
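
// Added note: 0xbf800000 is the IEEE-754 bit pattern of -1.0f, i.e. an
// always-negative operand, so (assuming SI_KIL kills on a negative source)
// this makes the unconditional kilp intrinsic fire every time.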

def : Pat <
  (int_AMDGPU_cube VReg_128:$src),
  (INSERT_SUBREG (INSERT_SUBREG (INSERT_SUBREG (INSERT_SUBREG (v4f32 (IMPLICIT_DEF)),
    (V_CUBETC_F32 (EXTRACT_SUBREG VReg_128:$src, sel_x),
                  (EXTRACT_SUBREG VReg_128:$src, sel_y),
                  (EXTRACT_SUBREG VReg_128:$src, sel_z),
                  0, 0, 0, 0), sel_x),
    (V_CUBESC_F32 (EXTRACT_SUBREG VReg_128:$src, sel_x),
                  (EXTRACT_SUBREG VReg_128:$src, sel_y),
                  (EXTRACT_SUBREG VReg_128:$src, sel_z),
                  0, 0, 0, 0), sel_y),
    (V_CUBEMA_F32 (EXTRACT_SUBREG VReg_128:$src, sel_x),
                  (EXTRACT_SUBREG VReg_128:$src, sel_y),
                  (EXTRACT_SUBREG VReg_128:$src, sel_z),
                  0, 0, 0, 0), sel_z),
    (V_CUBEID_F32 (EXTRACT_SUBREG VReg_128:$src, sel_x),
                  (EXTRACT_SUBREG VReg_128:$src, sel_y),
                  (EXTRACT_SUBREG VReg_128:$src, sel_z),
                  0, 0, 0, 0), sel_w)
>;

/********** ================== **********/
/**********   VOP3 Patterns    **********/
/********** ================== **********/

def : Pat <(f32 (IL_mad AllReg_32:$src0, VReg_32:$src1, VReg_32:$src2)),
           (V_MAD_LEGACY_F32 AllReg_32:$src0, VReg_32:$src1, VReg_32:$src2,
            0, 0, 0, 0)>;
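
// Added note: the four trailing zeros clear V_MAD_LEGACY_F32's extra VOP3
// operand fields (the source/output modifier operands in this encoding, as
// we understand it), leaving them at their defaults.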

} // End isSI predicate