//===-- SIInstructions.td - SI Instruction Definitions --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// This file was originally auto-generated from a GPU register header file and
// all the instruction definitions were originally commented out. Instructions
// that are not yet supported remain commented out.
//===----------------------------------------------------------------------===//

def isGCN : Predicate<"Subtarget->getGeneration() "
                      ">= SISubtarget::SOUTHERN_ISLANDS">,
            AssemblerPredicate<"FeatureGCN">;
def isSI : Predicate<"Subtarget->getGeneration() "
                     "== SISubtarget::SOUTHERN_ISLANDS">,
           AssemblerPredicate<"FeatureSouthernIslands">;

def has16BankLDS : Predicate<"Subtarget->getLDSBankCount() == 16">;
def has32BankLDS : Predicate<"Subtarget->getLDSBankCount() == 32">;

include "VOPInstructions.td"
include "SOPInstructions.td"
include "SMInstructions.td"
include "FLATInstructions.td"
include "BUFInstructions.td"

let SubtargetPredicate = isGCN in {

//===----------------------------------------------------------------------===//
// EXP Instructions
//===----------------------------------------------------------------------===//

defm EXP : EXP_m;

//===----------------------------------------------------------------------===//
// VINTRP Instructions
//===----------------------------------------------------------------------===//

let Uses = [M0, EXEC] in {

// FIXME: Specify SchedRW for VINTRP instructions.

multiclass V_INTERP_P1_F32_m : VINTRP_m <
  0x00000000,
  (outs VGPR_32:$dst),
  (ins VGPR_32:$i, i32imm:$attr_chan, i32imm:$attr),
  "v_interp_p1_f32 $dst, $i, $attr_chan, $attr, [m0]",
  [(set f32:$dst, (AMDGPUinterp_p1 i32:$i, (i32 imm:$attr_chan),
                                   (i32 imm:$attr)))]
>;

let OtherPredicates = [has32BankLDS] in {

defm V_INTERP_P1_F32 : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has32BankLDS]

let OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $dst", isAsmParserOnly=1 in {

defm V_INTERP_P1_F32_16bank : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $dst", isAsmParserOnly=1

let DisableEncoding = "$src0", Constraints = "$src0 = $dst" in {

defm V_INTERP_P2_F32 : VINTRP_m <
  0x00000001,
  (outs VGPR_32:$dst),
  (ins VGPR_32:$src0, VGPR_32:$j, i32imm:$attr_chan, i32imm:$attr),
  "v_interp_p2_f32 $dst, [$src0], $j, $attr_chan, $attr, [m0]",
  [(set f32:$dst, (AMDGPUinterp_p2 f32:$src0, i32:$j, (i32 imm:$attr_chan),
                                   (i32 imm:$attr)))]>;

} // End DisableEncoding = "$src0", Constraints = "$src0 = $dst"

defm V_INTERP_MOV_F32 : VINTRP_m <
  0x00000002,
  (outs VGPR_32:$dst),
  (ins InterpSlot:$src0, i32imm:$attr_chan, i32imm:$attr),
  "v_interp_mov_f32 $dst, $src0, $attr_chan, $attr, [m0]",
  [(set f32:$dst, (AMDGPUinterp_mov (i32 imm:$src0), (i32 imm:$attr_chan),
                                    (i32 imm:$attr)))]>;

} // End Uses = [M0, EXEC]

//===----------------------------------------------------------------------===//
// Pseudo Instructions
//===----------------------------------------------------------------------===//

let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC] in {

// For use in patterns
def V_CNDMASK_B64_PSEUDO : VOP3Common <(outs VReg_64:$vdst),
  (ins VSrc_b64:$src0, VSrc_b64:$src1, SSrc_b64:$src2), "", []> {
  let isPseudo = 1;
  let isCodeGenOnly = 1;
  let usesCustomInserter = 1;
}

// 64-bit vector move instruction. This is mainly used by the SIFoldOperands
// pass to enable folding of inline immediates.
def V_MOV_B64_PSEUDO : PseudoInstSI <(outs VReg_64:$vdst), (ins VSrc_b64:$src0)> {
  let VALU = 1;
}
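// (Roughly: keeping the value as a single 64-bit move until after folding lets
// an inline-immediate source be propagated into users as a whole, instead of
// as two unrelated 32-bit halves.)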
} // End let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC]

let usesCustomInserter = 1, SALU = 1 in {
def GET_GROUPSTATICSIZE : PseudoInstSI <(outs SReg_32:$sdst), (ins),
  [(set SReg_32:$sdst, (int_amdgcn_groupstaticsize))]>;
} // End let usesCustomInserter = 1, SALU = 1

def S_MOV_B64_term : PseudoInstSI<(outs SReg_64:$dst),
   (ins SSrc_b64:$src0)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_XOR_B64_term : PseudoInstSI<(outs SReg_64:$dst),
   (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_ANDN2_B64_term : PseudoInstSI<(outs SReg_64:$dst),
   (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

// SI pseudo instructions. These are used by the CFG structurizer pass
// and should be lowered to ISA instructions prior to codegen.

// Dummy terminator instruction to use after control flow instructions have
// been replaced with exec mask operations.
def SI_MASK_BRANCH : PseudoInstSI <
  (outs), (ins brtarget:$target)> {
  let isBranch = 0;
  let isTerminator = 1;
  let isBarrier = 0;
  let SALU = 1;
  let Uses = [EXEC];
}

let isTerminator = 1 in {

def SI_IF: CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, brtarget:$target),
  [(set i64:$dst, (int_amdgcn_if i1:$vcc, bb:$target))], 1, 1> {
  let Constraints = "";
  let Size = 12;
  let mayLoad = 1;
  let mayStore = 1;
  let hasSideEffects = 1;
}

def SI_ELSE : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src, brtarget:$target, i1imm:$execfix), [], 1, 1> {
  let Constraints = "$src = $dst";
  let Size = 12;
  let mayStore = 1;
  let mayLoad = 1;
  let hasSideEffects = 1;
}

def SI_LOOP : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved, brtarget:$target),
  [(int_amdgcn_loop i64:$saved, bb:$target)], 1, 1> {
  let Size = 8;
  let isBranch = 1;
  let hasSideEffects = 1;
  let mayLoad = 1;
  let mayStore = 1;
}

} // End isTerminator = 1

def SI_END_CF : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved),
  [(int_amdgcn_end_cf i64:$saved)], 1, 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
  let mayLoad = 1;
  let mayStore = 1;
  let hasSideEffects = 1;
}

def SI_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_break i64:$src))], 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_IF_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_if_break i1:$vcc, i64:$src))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_ELSE_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src0, SReg_64:$src1),
  [(set i64:$dst, (int_amdgcn_else_break i64:$src0, i64:$src1))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

let Uses = [EXEC], Defs = [EXEC,VCC] in {
def SI_KILL : PseudoInstSI <
  (outs), (ins VSrc_b32:$src),
  [(AMDGPUkill i32:$src)]> {
  let isConvergent = 1;
  let usesCustomInserter = 1;
}

def SI_KILL_TERMINATOR : SPseudoInstSI <
  (outs), (ins VSrc_b32:$src)> {
  let isTerminator = 1;
}

} // End Uses = [EXEC], Defs = [EXEC,VCC]


def SI_PS_LIVE : PseudoInstSI <
  (outs SReg_64:$dst), (ins),
  [(set i1:$dst, (int_amdgcn_ps_live))]> {
  let SALU = 1;
}

// Used as an isel pseudo to directly emit initialization with an
// s_mov_b32 rather than a copy of another initialized
// register. MachineCSE skips copies, and we don't want to have to
// fold operands before it runs.
def SI_INIT_M0 : SPseudoInstSI <(outs), (ins SSrc_b32:$src)> {
  let Defs = [M0];
  let usesCustomInserter = 1;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_RETURN : SPseudoInstSI <
  (outs), (ins variable_ops), [(AMDGPUreturn)]> {
  let isTerminator = 1;
  let isBarrier = 1;
  let isReturn = 1;
  let hasSideEffects = 1;
  let hasNoSchedulingInfo = 1;
  let DisableWQM = 1;
}

let Defs = [M0, EXEC],
    UseNamedOperandTable = 1 in {

class SI_INDIRECT_SRC<RegisterClass rc> : VPseudoInstSI <
  (outs VGPR_32:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset)> {
  let usesCustomInserter = 1;
}

class SI_INDIRECT_DST<RegisterClass rc> : VPseudoInstSI <
  (outs rc:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset, VGPR_32:$val)> {
  let Constraints = "$src = $vdst";
  let usesCustomInserter = 1;
}

// TODO: We can support indirect SGPR access.
def SI_INDIRECT_SRC_V1 : SI_INDIRECT_SRC<VGPR_32>;
def SI_INDIRECT_SRC_V2 : SI_INDIRECT_SRC<VReg_64>;
def SI_INDIRECT_SRC_V4 : SI_INDIRECT_SRC<VReg_128>;
def SI_INDIRECT_SRC_V8 : SI_INDIRECT_SRC<VReg_256>;
def SI_INDIRECT_SRC_V16 : SI_INDIRECT_SRC<VReg_512>;

def SI_INDIRECT_DST_V1 : SI_INDIRECT_DST<VGPR_32>;
def SI_INDIRECT_DST_V2 : SI_INDIRECT_DST<VReg_64>;
def SI_INDIRECT_DST_V4 : SI_INDIRECT_DST<VReg_128>;
def SI_INDIRECT_DST_V8 : SI_INDIRECT_DST<VReg_256>;
def SI_INDIRECT_DST_V16 : SI_INDIRECT_DST<VReg_512>;

} // End Defs = [M0, EXEC], UseNamedOperandTable = 1

multiclass SI_SPILL_SGPR <RegisterClass sgpr_class> {
  let UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC] in {
    def _SAVE : PseudoInstSI <
      (outs),
      (ins sgpr_class:$data, i32imm:$addr)> {
      let mayStore = 1;
      let mayLoad = 0;
    }

    def _RESTORE : PseudoInstSI <
      (outs sgpr_class:$data),
      (ins i32imm:$addr)> {
      let mayStore = 0;
      let mayLoad = 1;
    }
  } // End UseNamedOperandTable = 1
}

// You cannot use M0 as the output of v_readlane_b32 instructions or
// use it in the sdata operand of SMEM instructions. We still need to
// be able to spill the physical register m0, so allow it for
// SI_SPILL_32_* instructions.
defm SI_SPILL_S32 : SI_SPILL_SGPR <SReg_32>;
defm SI_SPILL_S64 : SI_SPILL_SGPR <SReg_64>;
defm SI_SPILL_S128 : SI_SPILL_SGPR <SReg_128>;
defm SI_SPILL_S256 : SI_SPILL_SGPR <SReg_256>;
defm SI_SPILL_S512 : SI_SPILL_SGPR <SReg_512>;

multiclass SI_SPILL_VGPR <RegisterClass vgpr_class> {
  let UseNamedOperandTable = 1, VGPRSpill = 1,
      SchedRW = [WriteVMEM] in {
    def _SAVE : VPseudoInstSI <
      (outs),
      (ins vgpr_class:$vdata, i32imm:$vaddr, SReg_128:$srsrc,
           SReg_32:$soffset, i32imm:$offset)> {
      let mayStore = 1;
      let mayLoad = 0;
      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }

    def _RESTORE : VPseudoInstSI <
      (outs vgpr_class:$vdata),
      (ins i32imm:$vaddr, SReg_128:$srsrc, SReg_32:$soffset,
           i32imm:$offset)> {
      let mayStore = 0;
      let mayLoad = 1;

      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }
  } // End UseNamedOperandTable = 1, VGPRSpill = 1, SchedRW = [WriteVMEM]
}
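// A rough sanity check of the Size formula above (assuming vgpr_class.Size is
// in bits): for VReg_128, !srl(128, 5) gives 4 sub-registers, !shl(4, 3) gives
// 32 bytes of per-subreg memory instructions, plus the 8 bytes of fixed
// overhead from the (2 * 4) term, for 40 bytes total.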

defm SI_SPILL_V32 : SI_SPILL_VGPR <VGPR_32>;
defm SI_SPILL_V64 : SI_SPILL_VGPR <VReg_64>;
defm SI_SPILL_V96 : SI_SPILL_VGPR <VReg_96>;
defm SI_SPILL_V128 : SI_SPILL_VGPR <VReg_128>;
defm SI_SPILL_V256 : SI_SPILL_VGPR <VReg_256>;
defm SI_SPILL_V512 : SI_SPILL_VGPR <VReg_512>;

def SI_PC_ADD_REL_OFFSET : SPseudoInstSI <
  (outs SReg_64:$dst),
  (ins si_ga:$ptr),
  [(set SReg_64:$dst, (i64 (SIpc_add_rel_offset (tglobaladdr:$ptr))))]> {
  let Defs = [SCC];
}

} // End SubtargetPredicate = isGCN

let Predicates = [isGCN] in {

def : Pat<
  (int_amdgcn_else i64:$src, bb:$target),
  (SI_ELSE $src, $target, 0)
>;

def : Pat <
  (int_AMDGPU_kilp),
  (SI_KILL 0xbf800000)
>;

def : Pat <
  (int_SI_export imm:$en, imm:$vm, imm:$done, imm:$tgt, imm:$compr,
                 f32:$src0, f32:$src1, f32:$src2, f32:$src3),
  (EXP imm:$en, imm:$tgt, imm:$compr, imm:$done, imm:$vm,
       $src0, $src1, $src2, $src3)
>;

//===----------------------------------------------------------------------===//
// VOP1 Patterns
//===----------------------------------------------------------------------===//

let Predicates = [UnsafeFPMath] in {

//def : RcpPat<V_RCP_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F32_e32, f32>;

def : RsqPat<V_RSQ_F32_e32, f32>;
def : RsqPat<V_RSQ_F64_e32, f64>;

// Convert (x - floor(x)) to fract(x)
def : Pat <
  (f32 (fsub (f32 (VOP3Mods f32:$x, i32:$mods)),
             (f32 (ffloor (f32 (VOP3Mods f32:$x, i32:$mods)))))),
  (V_FRACT_F32_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

// Convert (x + (-floor(x))) to fract(x)
def : Pat <
  (f64 (fadd (f64 (VOP3Mods f64:$x, i32:$mods)),
             (f64 (fneg (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))))))),
  (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

} // End Predicates = [UnsafeFPMath]

//===----------------------------------------------------------------------===//
// VOP2 Patterns
//===----------------------------------------------------------------------===//

def : Pat <
  (i32 (add (i32 (ctpop i32:$popcnt)), i32:$val)),
  (V_BCNT_U32_B32_e64 $popcnt, $val)
>;
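// (v_bcnt_u32_b32 already adds its second source operand to the popcount
// result, so the ctpop + add pair folds into a single instruction.)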

def : Pat <
  (i32 (select i1:$src0, i32:$src1, i32:$src2)),
  (V_CNDMASK_B32_e64 $src2, $src1, $src0)
>;

// Pattern for V_MAC_F32
def : Pat <
  (fmad (VOP3NoMods0 f32:$src0, i32:$src0_modifiers, i1:$clamp, i32:$omod),
        (VOP3NoMods f32:$src1, i32:$src1_modifiers),
        (VOP3NoMods f32:$src2, i32:$src2_modifiers)),
  (V_MAC_F32_e64 $src0_modifiers, $src0, $src1_modifiers, $src1,
                 $src2_modifiers, $src2, $clamp, $omod)
>;

/********** ============================================ **********/
/********** Extraction, Insertion, Building and Casting  **********/
/********** ============================================ **********/

foreach Index = 0-2 in {
  def Extract_Element_v2i32_#Index : Extract_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2i32_#Index : Insert_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v2f32_#Index : Extract_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2f32_#Index : Insert_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-3 in {
  def Extract_Element_v4i32_#Index : Extract_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4i32_#Index : Insert_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v4f32_#Index : Extract_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4f32_#Index : Insert_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-7 in {
  def Extract_Element_v8i32_#Index : Extract_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8i32_#Index : Insert_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v8f32_#Index : Extract_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8f32_#Index : Insert_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-15 in {
  def Extract_Element_v16i32_#Index : Extract_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16i32_#Index : Insert_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v16f32_#Index : Extract_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16f32_#Index : Insert_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

// FIXME: Why do only some of these type combinations exist for SReg and
// VReg?
// 32-bit bitcast
def : BitConvert <i32, f32, VGPR_32>;
def : BitConvert <f32, i32, VGPR_32>;
def : BitConvert <i32, f32, SReg_32>;
def : BitConvert <f32, i32, SReg_32>;

// 64-bit bitcast
def : BitConvert <i64, f64, VReg_64>;
def : BitConvert <f64, i64, VReg_64>;
def : BitConvert <v2i32, v2f32, VReg_64>;
def : BitConvert <v2f32, v2i32, VReg_64>;
def : BitConvert <i64, v2i32, VReg_64>;
def : BitConvert <v2i32, i64, VReg_64>;
def : BitConvert <i64, v2f32, VReg_64>;
def : BitConvert <v2f32, i64, VReg_64>;
def : BitConvert <f64, v2f32, VReg_64>;
def : BitConvert <v2f32, f64, VReg_64>;
def : BitConvert <f64, v2i32, VReg_64>;
def : BitConvert <v2i32, f64, VReg_64>;
def : BitConvert <v4i32, v4f32, VReg_128>;
def : BitConvert <v4f32, v4i32, VReg_128>;

// 128-bit bitcast
def : BitConvert <v2i64, v4i32, SReg_128>;
def : BitConvert <v4i32, v2i64, SReg_128>;
def : BitConvert <v2f64, v4f32, VReg_128>;
def : BitConvert <v2f64, v4i32, VReg_128>;
def : BitConvert <v4f32, v2f64, VReg_128>;
def : BitConvert <v4i32, v2f64, VReg_128>;
def : BitConvert <v2i64, v2f64, VReg_128>;
def : BitConvert <v2f64, v2i64, VReg_128>;

// 256-bit bitcast
def : BitConvert <v8i32, v8f32, SReg_256>;
def : BitConvert <v8f32, v8i32, SReg_256>;
def : BitConvert <v8i32, v8f32, VReg_256>;
def : BitConvert <v8f32, v8i32, VReg_256>;

// 512-bit bitcast
def : BitConvert <v16i32, v16f32, VReg_512>;
def : BitConvert <v16f32, v16i32, VReg_512>;

/********** =================== **********/
/********** Src & Dst modifiers **********/
/********** =================== **********/

def : Pat <
  (AMDGPUclamp (VOP3Mods0Clamp f32:$src0, i32:$src0_modifiers, i32:$omod),
               (f32 FP_ZERO), (f32 FP_ONE)),
  (V_ADD_F32_e64 $src0_modifiers, $src0, 0, 0, 1, $omod)
>;
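// (The clamp is implemented here as v_add_f32 $src0, 0 with the VOP3 clamp
// operand (the 1 before $omod) set, which clamps the result to [0.0, 1.0].)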

/********** ================================ **********/
/********** Floating point absolute/negative **********/
/********** ================================ **********/

// Prevent expanding both fneg and fabs.

def : Pat <
  (fneg (fabs f32:$src)),
  (S_OR_B32 $src, (S_MOV_B32 0x80000000)) // Set sign bit
>;

// FIXME: Should use S_OR_B32
def : Pat <
  (fneg (fabs f64:$src)),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_OR_B32_e32 (EXTRACT_SUBREG f64:$src, sub1),
                  (V_MOV_B32_e32 0x80000000)), // Set sign bit.
    sub1)
>;

def : Pat <
  (fabs f32:$src),
  (V_AND_B32_e64 $src, (V_MOV_B32_e32 0x7fffffff))
>;

def : Pat <
  (fneg f32:$src),
  (V_XOR_B32_e32 $src, (V_MOV_B32_e32 0x80000000))
>;

def : Pat <
  (fabs f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_AND_B32_e64 (EXTRACT_SUBREG f64:$src, sub1),
                   (V_MOV_B32_e32 0x7fffffff)), // Clear sign bit.
    sub1)
>;

def : Pat <
  (fneg f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_XOR_B32_e32 (EXTRACT_SUBREG f64:$src, sub1),
                   (V_MOV_B32_e32 0x80000000)),
    sub1)
>;
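// (The 64-bit variants above only touch the high half (sub1), since the sign
// bit of an f64 lives in bit 63; the low 32 bits pass through unchanged.)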

/********** ================== **********/
/********** Immediate Patterns **********/
/********** ================== **********/

def : Pat <
  (SGPRImm<(i32 imm)>:$imm),
  (S_MOV_B32 imm:$imm)
>;

def : Pat <
  (SGPRImm<(f32 fpimm)>:$imm),
  (S_MOV_B32 (f32 (bitcast_fpimm_to_i32 $imm)))
>;

def : Pat <
  (i32 imm:$imm),
  (V_MOV_B32_e32 imm:$imm)
>;

def : Pat <
  (f32 fpimm:$imm),
  (V_MOV_B32_e32 (f32 (bitcast_fpimm_to_i32 $imm)))
>;

def : Pat <
  (i32 frameindex:$fi),
  (V_MOV_B32_e32 (i32 (frameindex_to_targetframeindex $fi)))
>;

def : Pat <
  (i64 InlineImm<i64>:$imm),
  (S_MOV_B64 InlineImm<i64>:$imm)
>;

// XXX - Should this use a s_cmp to set SCC?

// Set to sign-extended 64-bit value (true = -1, false = 0)
def : Pat <
  (i1 imm:$imm),
  (S_MOV_B64 (i64 (as_i64imm $imm)))
>;

def : Pat <
  (f64 InlineFPImm<f64>:$imm),
  (S_MOV_B64 (f64 (bitcast_fpimm_to_i64 InlineFPImm<f64>:$imm)))
>;

/********** ================== **********/
/********** Intrinsic Patterns **********/
/********** ================== **********/

def : POW_Common <V_LOG_F32_e32, V_EXP_F32_e32, V_MUL_LEGACY_F32_e32>;

def : Pat <
  (int_AMDGPU_cube v4f32:$src),
  (REG_SEQUENCE VReg_128,
    (V_CUBETC_F32 0 /* src0_modifiers */, (EXTRACT_SUBREG $src, sub0),
                  0 /* src1_modifiers */, (EXTRACT_SUBREG $src, sub1),
                  0 /* src2_modifiers */, (EXTRACT_SUBREG $src, sub2),
                  0 /* clamp */, 0 /* omod */), sub0,
    (V_CUBESC_F32 0 /* src0_modifiers */, (EXTRACT_SUBREG $src, sub0),
                  0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub1),
                  0 /* src2_modifiers */,(EXTRACT_SUBREG $src, sub2),
                  0 /* clamp */, 0 /* omod */), sub1,
    (V_CUBEMA_F32 0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub0),
                  0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub1),
                  0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub2),
                  0 /* clamp */, 0 /* omod */), sub2,
    (V_CUBEID_F32 0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub0),
                  0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub1),
                  0 /* src1_modifiers */,(EXTRACT_SUBREG $src, sub2),
                  0 /* clamp */, 0 /* omod */), sub3)
>;

def : Pat <
  (i32 (sext i1:$src0)),
  (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src0)
>;
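// (Sign-extending an i1 selects between all-zeros and all-ones per lane; the
// zext/anyext variants below select 0 or 1 instead.)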

class Ext32Pat <SDNode ext> : Pat <
  (i32 (ext i1:$src0)),
  (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src0)
>;

def : Ext32Pat <zext>;
def : Ext32Pat <anyext>;

// The multiplication scales from [0,1] to the unsigned integer range
def : Pat <
  (AMDGPUurecip i32:$src0),
  (V_CVT_U32_F32_e32
    (V_MUL_F32_e32 CONST.FP_UINT_MAX_PLUS_1,
      (V_RCP_IFLAG_F32_e32 (V_CVT_F32_U32_e32 $src0))))
>;
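// (Roughly: this approximates 2^32 / $src0 by converting to float, taking the
// reciprocal, and scaling back up, assuming CONST.FP_UINT_MAX_PLUS_1 encodes
// 2^32 as an f32 constant.)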

//===----------------------------------------------------------------------===//
// VOP3 Patterns
//===----------------------------------------------------------------------===//

def : IMad24Pat<V_MAD_I32_I24>;
def : UMad24Pat<V_MAD_U32_U24>;

defm : BFIPatterns <V_BFI_B32, S_MOV_B32, SReg_64>;
def : ROTRPattern <V_ALIGNBIT_B32>;

/********** ====================== **********/
/**********   Indirect addressing   **********/
/********** ====================== **********/

multiclass SI_INDIRECT_Pattern <ValueType vt, ValueType eltvt, string VecSize> {
  // Extract with offset
  def : Pat<
    (eltvt (extractelt vt:$src, (MOVRELOffset i32:$idx, (i32 imm:$offset)))),
    (!cast<Instruction>("SI_INDIRECT_SRC_"#VecSize) $src, $idx, imm:$offset)
  >;

  // Insert with offset
  def : Pat<
    (insertelt vt:$src, eltvt:$val, (MOVRELOffset i32:$idx, (i32 imm:$offset))),
    (!cast<Instruction>("SI_INDIRECT_DST_"#VecSize) $src, $idx, imm:$offset, $val)
  >;
}

defm : SI_INDIRECT_Pattern <v2f32, f32, "V2">;
defm : SI_INDIRECT_Pattern <v4f32, f32, "V4">;
defm : SI_INDIRECT_Pattern <v8f32, f32, "V8">;
defm : SI_INDIRECT_Pattern <v16f32, f32, "V16">;

defm : SI_INDIRECT_Pattern <v2i32, i32, "V2">;
defm : SI_INDIRECT_Pattern <v4i32, i32, "V4">;
defm : SI_INDIRECT_Pattern <v8i32, i32, "V8">;
defm : SI_INDIRECT_Pattern <v16i32, i32, "V16">;

//===----------------------------------------------------------------------===//
// SAD Patterns
//===----------------------------------------------------------------------===//

def : Pat <
  (add (sub_oneuse (umax i32:$src0, i32:$src1),
                   (umin i32:$src0, i32:$src1)),
       i32:$src2),
  (V_SAD_U32 $src0, $src1, $src2)
>;

def : Pat <
  (add (select_oneuse (i1 (setugt i32:$src0, i32:$src1)),
                      (sub i32:$src0, i32:$src1),
                      (sub i32:$src1, i32:$src0)),
       i32:$src2),
  (V_SAD_U32 $src0, $src1, $src2)
>;
| 758 | |
| 759 | //===----------------------------------------------------------------------===// |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 760 | // Conversion Patterns |
| 761 | //===----------------------------------------------------------------------===// |
| 762 | |
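// The S_BFE_* immediate packs the bitfield as (width << 16) | offset, which
// is what the "offset | width << 16" comments on the patterns below spell out.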
def : Pat<(i32 (sext_inreg i32:$src, i1)),
          (S_BFE_I32 i32:$src, 65536)>; // 0 | 1 << 16

// Handle sext_inreg in i64
def : Pat <
  (i64 (sext_inreg i64:$src, i1)),
  (S_BFE_I64 i64:$src, 0x10000) // 0 | 1 << 16
>;

def : Pat <
  (i64 (sext_inreg i64:$src, i8)),
  (S_BFE_I64 i64:$src, 0x80000) // 0 | 8 << 16
>;

def : Pat <
  (i64 (sext_inreg i64:$src, i16)),
  (S_BFE_I64 i64:$src, 0x100000) // 0 | 16 << 16
>;

def : Pat <
  (i64 (sext_inreg i64:$src, i32)),
  (S_BFE_I64 i64:$src, 0x200000) // 0 | 32 << 16
>;

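// Widening a 32-bit value to 64 bits only has to fill the high half: zero it
// for zext, and leave it undefined (IMPLICIT_DEF) for anyext.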
def : Pat <
  (i64 (zext i32:$src)),
  (REG_SEQUENCE SReg_64, $src, sub0, (S_MOV_B32 0), sub1)
>;

def : Pat <
  (i64 (anyext i32:$src)),
  (REG_SEQUENCE SReg_64, $src, sub0, (i32 (IMPLICIT_DEF)), sub1)
>;

class ZExt_i64_i1_Pat <SDNode ext> : Pat <
  (i64 (ext i1:$src)),
  (REG_SEQUENCE VReg_64,
    (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src), sub0,
    (S_MOV_B32 0), sub1)
>;

def : ZExt_i64_i1_Pat<zext>;
def : ZExt_i64_i1_Pat<anyext>;

// FIXME: We need to use COPY_TO_REGCLASS to work around the fact that
// REG_SEQUENCE patterns don't support instructions with multiple outputs.
def : Pat <
  (i64 (sext i32:$src)),
  (REG_SEQUENCE SReg_64, $src, sub0,
    (i32 (COPY_TO_REGCLASS (S_ASHR_I32 $src, 31), SReg_32_XM0)), sub1)
>;

def : Pat <
  (i64 (sext i1:$src)),
  (REG_SEQUENCE VReg_64,
    (V_CNDMASK_B32_e64 0, -1, $src), sub0,
    (V_CNDMASK_B32_e64 0, -1, $src), sub1)
>;

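// FP-to-i1 conversions are selected as an equality compare against +1.0
// (unsigned) or -1.0 (signed), with any source modifiers folded into the
// VOP3 compare; the compare result is already a legal i1 condition mask.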
class FPToI1Pat<Instruction Inst, int KOne, ValueType vt,
                SDPatternOperator fp_to_int> : Pat <
  (i1 (fp_to_int (vt (VOP3Mods vt:$src0, i32:$src0_modifiers)))),
  (i1 (Inst 0, KOne, $src0_modifiers, $src0, DSTCLAMP.NONE, DSTOMOD.NONE))
>;

def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_ONE, f32, fp_to_uint>;
def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_NEG_ONE, f32, fp_to_sint>;
def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_ONE, f64, fp_to_uint>;
def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_NEG_ONE, f64, fp_to_sint>;

// If we need to perform a logical operation on i1 values, we need to
// use vector comparisons since there is only one SCC register. Vector
// comparisons still write to a pair of SGPRs, so treat these as
// 64-bit comparisons. When SGPR copies are legalized, instructions
// that would otherwise require a copy from SCC are moved to the VALU.
def : Pat <
  (i1 (and i1:$src0, i1:$src1)),
  (S_AND_B64 $src0, $src1)
>;

def : Pat <
  (i1 (or i1:$src0, i1:$src1)),
  (S_OR_B64 $src0, $src1)
>;

def : Pat <
  (i1 (xor i1:$src0, i1:$src1)),
  (S_XOR_B64 $src0, $src1)
>;

def : Pat <
  (f32 (sint_to_fp i1:$src)),
  (V_CNDMASK_B32_e64 (i32 0), CONST.FP32_NEG_ONE, $src)
>;

def : Pat <
  (f32 (uint_to_fp i1:$src)),
  (V_CNDMASK_B32_e64 (i32 0), CONST.FP32_ONE, $src)
>;

def : Pat <
  (f64 (sint_to_fp i1:$src)),
  (V_CVT_F64_I32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src))
>;

def : Pat <
  (f64 (uint_to_fp i1:$src)),
  (V_CVT_F64_U32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src))
>;

//===----------------------------------------------------------------------===//
// Miscellaneous Patterns
//===----------------------------------------------------------------------===//

def : Pat <
  (i32 (trunc i64:$a)),
  (EXTRACT_SUBREG $a, sub0)
>;

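// Truncation to i1 keeps only bit 0. The result has to live in a 64-bit
// condition register, so mask the low bit and materialize the i1 with a
// compare.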
def : Pat <
  (i1 (trunc i32:$a)),
  (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), $a), 1)
>;

def : Pat <
  (i1 (trunc i64:$a)),
  (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1),
                    (EXTRACT_SUBREG $a, sub0)), 1)
>;

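// bswap via bitfield insert: with mask 0x00ff00ff, BFI keeps bytes 0 and 2 of
// rotr(a, 24) and bytes 1 and 3 of rotr(a, 8). For a = 0xAABBCCDD:
// rotr(a, 24) = 0xBBCCDDAA, rotr(a, 8) = 0xDDAABBCC, and
// BFI(0x00ff00ff, 0xBBCCDDAA, 0xDDAABBCC) = 0xDDCCBBAA.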
def : Pat <
  (i32 (bswap i32:$a)),
  (V_BFI_B32 (S_MOV_B32 0x00ff00ff),
             (V_ALIGNBIT_B32 $a, $a, 24),
             (V_ALIGNBIT_B32 $a, $a, 8))
>;

def : Pat <
  (f32 (select i1:$src2, f32:$src1, f32:$src0)),
  (V_CNDMASK_B32_e64 $src0, $src1, $src2)
>;

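// ((1 << a) - 1) << b is a bitfield mask of width $a starting at bit $b,
// which is exactly what S_BFM produces; the second pattern handles b == 0.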
multiclass BFMPatterns <ValueType vt, InstSI BFM, InstSI MOV> {
  def : Pat <
    (vt (shl (vt (add (vt (shl 1, vt:$a)), -1)), vt:$b)),
    (BFM $a, $b)
  >;

  def : Pat <
    (vt (add (vt (shl 1, vt:$a)), -1)),
    (BFM $a, (MOV 0))
  >;
}

defm : BFMPatterns <i32, S_BFM_B32, S_MOV_B32>;
// FIXME: defm : BFMPatterns <i64, S_BFM_B64, S_MOV_B64>;

def : BFEPattern <V_BFE_U32, S_MOV_B32>;

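// fcanonicalize is implemented as a multiply by 1.0, which quiets signaling
// NaNs and, depending on the FP mode, flushes denormals, yielding the
// canonical encoding of the value.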
def : Pat<
  (fcanonicalize f32:$src),
  (V_MUL_F32_e64 0, CONST.FP32_ONE, 0, $src, 0, 0)
>;

def : Pat<
  (fcanonicalize f64:$src),
  (V_MUL_F64 0, CONST.FP64_ONE, 0, $src, 0, 0)
>;

//===----------------------------------------------------------------------===//
// Fract Patterns
//===----------------------------------------------------------------------===//

let Predicates = [isSI] in {

// V_FRACT is buggy on SI, so the F32 version is never used and (x-floor(x)) is
// used instead. However, SI doesn't have V_FLOOR_F64, so the most efficient
// way to implement it is using V_FRACT_F64.
// The workaround for the V_FRACT bug is:
// fract(x) = isnan(x) ? x : min(V_FRACT(x), 0.99999999999999999)
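// The constant 0x3fefffffffffffff below is the largest f64 strictly less than
// 1.0, i.e. the 0.99999999999999999 clamp in the formula above.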

// Convert floor(x) to (x - fract(x))
def : Pat <
  (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))),
  (V_ADD_F64
      $mods,
      $x,
      SRCMODS.NEG,
      (V_CNDMASK_B64_PSEUDO
         (V_MIN_F64
             SRCMODS.NONE,
             (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE),
             SRCMODS.NONE,
             (V_MOV_B64_PSEUDO 0x3fefffffffffffff),
             DSTCLAMP.NONE, DSTOMOD.NONE),
         $x,
         (V_CMP_CLASS_F64_e64 SRCMODS.NONE, $x, 3/*NaN*/)),
      DSTCLAMP.NONE, DSTOMOD.NONE)
>;

} // End Predicates = [isSI]

//============================================================================//
// Miscellaneous Optimization Patterns
//============================================================================//

def : SHA256MaPattern <V_BFI_B32, V_XOR_B32_e64>;

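// V_MED3 computes the median of its three operands; IntMed3Pat matches the
// max/min expansion of a median (as typically produced by range clamps) for
// the signed and unsigned cases.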
def : IntMed3Pat<V_MED3_I32, smax, smax_oneuse, smin_oneuse>;
def : IntMed3Pat<V_MED3_U32, umax, umax_oneuse, umin_oneuse>;

//============================================================================//
// Assembler aliases
//============================================================================//

def : MnemonicAlias<"v_add_u32", "v_add_i32">;
def : MnemonicAlias<"v_sub_u32", "v_sub_i32">;
def : MnemonicAlias<"v_subrev_u32", "v_subrev_i32">;

} // End isGCN predicate