//===-- SIInstructions.td - SI Instruction Definitions --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// This file was originally auto-generated from a GPU register header file and
// all the instruction definitions were originally commented out. Instructions
// that are not yet supported remain commented out.
//===----------------------------------------------------------------------===//

def isGCN : Predicate<"Subtarget->getGeneration() "
                      ">= SISubtarget::SOUTHERN_ISLANDS">,
            AssemblerPredicate<"FeatureGCN">;
def isSI : Predicate<"Subtarget->getGeneration() "
                     "== SISubtarget::SOUTHERN_ISLANDS">,
           AssemblerPredicate<"FeatureSouthernIslands">;

def has16BankLDS : Predicate<"Subtarget->getLDSBankCount() == 16">;
def has32BankLDS : Predicate<"Subtarget->getLDSBankCount() == 32">;
def HasVGPRIndexMode : Predicate<"Subtarget->hasVGPRIndexMode()">,
                       AssemblerPredicate<"FeatureVGPRIndexMode">;
def HasMovrel : Predicate<"Subtarget->hasMovrel()">,
                AssemblerPredicate<"FeatureMovrel">;

include "VOPInstructions.td"
include "SOPInstructions.td"
include "SMInstructions.td"
include "FLATInstructions.td"
include "BUFInstructions.td"

let SubtargetPredicate = isGCN in {

//===----------------------------------------------------------------------===//
// EXP Instructions
//===----------------------------------------------------------------------===//

defm EXP : EXP_m<0, AMDGPUexport>;
defm EXP_DONE : EXP_m<1, AMDGPUexport_done>;

//===----------------------------------------------------------------------===//
// VINTRP Instructions
//===----------------------------------------------------------------------===//

let Uses = [M0, EXEC] in {

// FIXME: Specify SchedRW for VINTRP instructions.

multiclass V_INTERP_P1_F32_m : VINTRP_m <
  0x00000000,
  (outs VGPR_32:$vdst),
  (ins VGPR_32:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_p1_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_p1 f32:$vsrc, (i32 imm:$attrchan),
                                    (i32 imm:$attr)))]
>;

let OtherPredicates = [has32BankLDS] in {

defm V_INTERP_P1_F32 : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has32BankLDS]

let OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $vdst", isAsmParserOnly=1 in {

defm V_INTERP_P1_F32_16bank : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $vdst", isAsmParserOnly=1

let DisableEncoding = "$src0", Constraints = "$src0 = $vdst" in {

defm V_INTERP_P2_F32 : VINTRP_m <
  0x00000001,
  (outs VGPR_32:$vdst),
  (ins VGPR_32:$src0, VGPR_32:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_p2_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_p2 f32:$src0, f32:$vsrc, (i32 imm:$attrchan),
                                    (i32 imm:$attr)))]>;

} // End DisableEncoding = "$src0", Constraints = "$src0 = $vdst"

defm V_INTERP_MOV_F32 : VINTRP_m <
  0x00000002,
  (outs VGPR_32:$vdst),
  (ins InterpSlot:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_mov_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_mov (i32 imm:$vsrc), (i32 imm:$attrchan),
                                     (i32 imm:$attr)))]>;

} // End Uses = [M0, EXEC]

//===----------------------------------------------------------------------===//
// Pseudo Instructions
//===----------------------------------------------------------------------===//

let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC] in {

// For use in patterns
def V_CNDMASK_B64_PSEUDO : VOP3Common <(outs VReg_64:$vdst),
  (ins VSrc_b64:$src0, VSrc_b64:$src1, SSrc_b64:$src2), "", []> {
  let isPseudo = 1;
  let isCodeGenOnly = 1;
  let usesCustomInserter = 1;
}

// 64-bit vector move instruction. This is mainly used by the SIFoldOperands
// pass to enable folding of inline immediates.
def V_MOV_B64_PSEUDO : VPseudoInstSI <(outs VReg_64:$vdst),
                                      (ins VSrc_b64:$src0)>;
} // End let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC]

def S_TRAP_PSEUDO : SPseudoInstSI <(outs), (ins i16imm:$simm16)> {
  let hasSideEffects = 1;
  let SALU = 1;
  let usesCustomInserter = 1;
}

let usesCustomInserter = 1, SALU = 1 in {
def GET_GROUPSTATICSIZE : PseudoInstSI <(outs SReg_32:$sdst), (ins),
  [(set SReg_32:$sdst, (int_amdgcn_groupstaticsize))]>;
} // End let usesCustomInserter = 1, SALU = 1

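// Terminator forms of common SALU operations. Marking these as terminators
// (presumably so that late exec-mask updates can sit legally at the end of a
// block) keeps them distinct from the ordinary s_mov_b64 / s_xor_b64 /
// s_andn2_b64 opcodes they mirror.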
def S_MOV_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_XOR_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_ANDN2_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def WAVE_BARRIER : SPseudoInstSI<(outs), (ins),
  [(int_amdgcn_wave_barrier)]> {
  let SchedRW = [];
  let hasNoSchedulingInfo = 1;
  let hasSideEffects = 1;
  let mayLoad = 1;
  let mayStore = 1;
  let isBarrier = 1;
  let isConvergent = 1;
}

// SI pseudo instructions. These are used by the CFG structurizer pass
// and should be lowered to ISA instructions prior to codegen.

// Dummy terminator instruction to use after control flow instructions are
// replaced with exec mask operations.
def SI_MASK_BRANCH : PseudoInstSI <
  (outs), (ins brtarget:$target)> {
  let isBranch = 0;
  let isTerminator = 1;
  let isBarrier = 0;
  let Uses = [EXEC];
  let SchedRW = [];
  let hasNoSchedulingInfo = 1;
}

let isTerminator = 1 in {

def SI_IF: CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, brtarget:$target),
  [(set i64:$dst, (AMDGPUif i1:$vcc, bb:$target))], 1, 1> {
  let Constraints = "";
  let Size = 12;
  let hasSideEffects = 1;
}

def SI_ELSE : CFPseudoInstSI <
  (outs SReg_64:$dst),
  (ins SReg_64:$src, brtarget:$target, i1imm:$execfix), [], 1, 1> {
  let Constraints = "$src = $dst";
  let Size = 12;
  let hasSideEffects = 1;
}

def SI_LOOP : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved, brtarget:$target),
  [(AMDGPUloop i64:$saved, bb:$target)], 1, 1> {
  let Size = 8;
  let isBranch = 0;
  let hasSideEffects = 1;
}

} // End isTerminator = 1

def SI_END_CF : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved),
  [(int_amdgcn_end_cf i64:$saved)], 1, 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
  let hasSideEffects = 1;
  let mayLoad = 1; // FIXME: Should not need memory flags
  let mayStore = 1;
}

def SI_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_break i64:$src))], 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_IF_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_if_break i1:$vcc, i64:$src))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_ELSE_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src0, SReg_64:$src1),
  [(set i64:$dst, (int_amdgcn_else_break i64:$src0, i64:$src1))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

let Uses = [EXEC], Defs = [EXEC,VCC] in {
def SI_KILL : PseudoInstSI <
  (outs), (ins VSrc_b32:$src),
  [(AMDGPUkill i32:$src)]> {
  let isConvergent = 1;
  let usesCustomInserter = 1;
}

def SI_KILL_TERMINATOR : SPseudoInstSI <
  (outs), (ins VSrc_b32:$src)> {
  let isTerminator = 1;
}

} // End Uses = [EXEC], Defs = [EXEC,VCC]

// Branch on undef scc. Used to avoid intermediate copy from
// IMPLICIT_DEF to SCC.
def SI_BR_UNDEF : SPseudoInstSI <(outs), (ins sopp_brtarget:$simm16)> {
  let isTerminator = 1;
  let usesCustomInserter = 1;
}

def SI_PS_LIVE : PseudoInstSI <
  (outs SReg_64:$dst), (ins),
  [(set i1:$dst, (int_amdgcn_ps_live))]> {
  let SALU = 1;
}

// Used as an isel pseudo to directly emit initialization with an
// s_mov_b32 rather than a copy of another initialized
// register. MachineCSE skips copies, and we don't want to have to
// fold operands before it runs.
def SI_INIT_M0 : SPseudoInstSI <(outs), (ins SSrc_b32:$src)> {
  let Defs = [M0];
  let usesCustomInserter = 1;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_RETURN : SPseudoInstSI <
  (outs), (ins variable_ops), [(AMDGPUreturn)]> {
  let isTerminator = 1;
  let isBarrier = 1;
  let isReturn = 1;
  let hasSideEffects = 1;
  let hasNoSchedulingInfo = 1;
  let DisableWQM = 1;
}

let Defs = [M0, EXEC],
    UseNamedOperandTable = 1 in {

class SI_INDIRECT_SRC<RegisterClass rc> : VPseudoInstSI <
  (outs VGPR_32:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset)> {
  let usesCustomInserter = 1;
}

class SI_INDIRECT_DST<RegisterClass rc> : VPseudoInstSI <
  (outs rc:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset, VGPR_32:$val)> {
  let Constraints = "$src = $vdst";
  let usesCustomInserter = 1;
}

// TODO: We can support indirect SGPR access.
def SI_INDIRECT_SRC_V1 : SI_INDIRECT_SRC<VGPR_32>;
def SI_INDIRECT_SRC_V2 : SI_INDIRECT_SRC<VReg_64>;
def SI_INDIRECT_SRC_V4 : SI_INDIRECT_SRC<VReg_128>;
def SI_INDIRECT_SRC_V8 : SI_INDIRECT_SRC<VReg_256>;
def SI_INDIRECT_SRC_V16 : SI_INDIRECT_SRC<VReg_512>;

def SI_INDIRECT_DST_V1 : SI_INDIRECT_DST<VGPR_32>;
def SI_INDIRECT_DST_V2 : SI_INDIRECT_DST<VReg_64>;
def SI_INDIRECT_DST_V4 : SI_INDIRECT_DST<VReg_128>;
def SI_INDIRECT_DST_V8 : SI_INDIRECT_DST<VReg_256>;
def SI_INDIRECT_DST_V16 : SI_INDIRECT_DST<VReg_512>;

} // End Defs = [M0, EXEC], UseNamedOperandTable = 1

multiclass SI_SPILL_SGPR <RegisterClass sgpr_class> {
  let UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC] in {
    def _SAVE : PseudoInstSI <
      (outs),
      (ins sgpr_class:$data, i32imm:$addr)> {
      let mayStore = 1;
      let mayLoad = 0;
    }

    def _RESTORE : PseudoInstSI <
      (outs sgpr_class:$data),
      (ins i32imm:$addr)> {
      let mayStore = 0;
      let mayLoad = 1;
    }
  } // End UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC]
}

// You cannot use M0 as the output of v_readlane_b32 instructions or
// use it in the sdata operand of SMEM instructions. We still need to
// be able to spill the physical register m0, so allow it for
// SI_SPILL_32_* instructions.
defm SI_SPILL_S32  : SI_SPILL_SGPR <SReg_32>;
defm SI_SPILL_S64  : SI_SPILL_SGPR <SReg_64>;
defm SI_SPILL_S128 : SI_SPILL_SGPR <SReg_128>;
defm SI_SPILL_S256 : SI_SPILL_SGPR <SReg_256>;
defm SI_SPILL_S512 : SI_SPILL_SGPR <SReg_512>;
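
// Each defm above expands the multiclass into a matching pair of pseudos; for
// example, SI_SPILL_S128 produces SI_SPILL_S128_SAVE and SI_SPILL_S128_RESTORE.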

multiclass SI_SPILL_VGPR <RegisterClass vgpr_class> {
  let UseNamedOperandTable = 1, VGPRSpill = 1,
      SchedRW = [WriteVMEM] in {
    def _SAVE : VPseudoInstSI <
      (outs),
      (ins vgpr_class:$vdata, i32imm:$vaddr, SReg_128:$srsrc,
           SReg_32:$soffset, i32imm:$offset)> {
      let mayStore = 1;
      let mayLoad = 0;
      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
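      // For example, VReg_128 (vgpr_class.Size = 128 bits) gives
      // (128 >> 5) = 4 subregisters, so (4 << 3) + 8 = 40 bytes.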
    }

    def _RESTORE : VPseudoInstSI <
      (outs vgpr_class:$vdata),
      (ins i32imm:$vaddr, SReg_128:$srsrc, SReg_32:$soffset,
           i32imm:$offset)> {
      let mayStore = 0;
      let mayLoad = 1;

      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }
  } // End UseNamedOperandTable = 1, VGPRSpill = 1, SchedRW = [WriteVMEM]
}

defm SI_SPILL_V32  : SI_SPILL_VGPR <VGPR_32>;
defm SI_SPILL_V64  : SI_SPILL_VGPR <VReg_64>;
defm SI_SPILL_V96  : SI_SPILL_VGPR <VReg_96>;
defm SI_SPILL_V128 : SI_SPILL_VGPR <VReg_128>;
defm SI_SPILL_V256 : SI_SPILL_VGPR <VReg_256>;
defm SI_SPILL_V512 : SI_SPILL_VGPR <VReg_512>;

def SI_PC_ADD_REL_OFFSET : SPseudoInstSI <
  (outs SReg_64:$dst),
  (ins si_ga:$ptr_lo, si_ga:$ptr_hi),
  [(set SReg_64:$dst,
    (i64 (SIpc_add_rel_offset (tglobaladdr:$ptr_lo), (tglobaladdr:$ptr_hi))))]> {
  let Defs = [SCC];
}

} // End SubtargetPredicate = isGCN

let Predicates = [isGCN] in {
def : Pat<
  (trap),
  (S_TRAP_PSEUDO TRAPID.LLVM_TRAP)
>;

def : Pat<
  (debugtrap),
  (S_TRAP_PSEUDO TRAPID.LLVM_DEBUG_TRAP)
>;

def : Pat<
  (AMDGPUelse i64:$src, bb:$target),
  (SI_ELSE $src, $target, 0)
>;

def : Pat <
  (int_AMDGPU_kilp),
  (SI_KILL (i32 0xbf800000))
>;

//===----------------------------------------------------------------------===//
// VOP1 Patterns
//===----------------------------------------------------------------------===//

let Predicates = [UnsafeFPMath] in {

//def : RcpPat<V_RCP_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F32_e32, f32>;

def : RsqPat<V_RSQ_F32_e32, f32>;
def : RsqPat<V_RSQ_F64_e32, f64>;

// Convert (x - floor(x)) to fract(x)
def : Pat <
  (f32 (fsub (f32 (VOP3Mods f32:$x, i32:$mods)),
             (f32 (ffloor (f32 (VOP3Mods f32:$x, i32:$mods)))))),
  (V_FRACT_F32_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;
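
// As a rough illustration, the pattern above matches IR of the form
//   %f = call float @llvm.floor.f32(float %x)
//   %r = fsub float %x, %f
// and selects a single v_fract_f32 in place of the floor/sub pair.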

// Convert (x + (-floor(x))) to fract(x)
def : Pat <
  (f64 (fadd (f64 (VOP3Mods f64:$x, i32:$mods)),
             (f64 (fneg (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))))))),
  (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

} // End Predicates = [UnsafeFPMath]

// f16_to_fp patterns
def : Pat <
  (f32 (f16_to_fp i32:$src0)),
  (V_CVT_F32_F16_e64 SRCMODS.NONE, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f32 (f16_to_fp (and_oneuse i32:$src0, 0x7fff))),
  (V_CVT_F32_F16_e64 SRCMODS.ABS, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f32 (f16_to_fp (or_oneuse i32:$src0, 0x8000))),
  (V_CVT_F32_F16_e64 SRCMODS.NEG_ABS, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f32 (f16_to_fp (xor_oneuse i32:$src0, 0x8000))),
  (V_CVT_F32_F16_e64 SRCMODS.NEG, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f64 (fpextend f16:$src)),
  (V_CVT_F64_F32_e32 (V_CVT_F32_F16_e32 $src))
>;
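
// The f64 case above and the f16 <-> integer cases below are emitted as
// chained conversions through f32, on the assumption that no single
// f16 <-> f64 (or direct f16 <-> i32) convert instruction is available here.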

// fp_to_fp16 patterns
def : Pat <
  (i32 (AMDGPUfp_to_f16 (f32 (VOP3Mods0 f32:$src0, i32:$src0_modifiers, i1:$clamp, i32:$omod)))),
  (V_CVT_F16_F32_e64 $src0_modifiers, f32:$src0, $clamp, $omod)
>;

def : Pat <
  (i32 (fp_to_sint f16:$src)),
  (V_CVT_I32_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (i32 (fp_to_uint f16:$src)),
  (V_CVT_U32_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (f16 (sint_to_fp i32:$src)),
  (V_CVT_F16_F32_e32 (V_CVT_F32_I32_e32 $src))
>;

def : Pat <
  (f16 (uint_to_fp i32:$src)),
  (V_CVT_F16_F32_e32 (V_CVT_F32_U32_e32 $src))
>;

//===----------------------------------------------------------------------===//
// VOP2 Patterns
//===----------------------------------------------------------------------===//

multiclass FMADPat <ValueType vt, Instruction inst> {
  def : Pat <
    (vt (fmad (VOP3NoMods0 vt:$src0, i32:$src0_modifiers, i1:$clamp, i32:$omod),
              (VOP3NoMods vt:$src1, i32:$src1_modifiers),
              (VOP3NoMods vt:$src2, i32:$src2_modifiers))),
    (inst $src0_modifiers, $src0, $src1_modifiers, $src1,
          $src2_modifiers, $src2, $clamp, $omod)
  >;
}

defm : FMADPat <f16, V_MAC_F16_e64>;
defm : FMADPat <f32, V_MAC_F32_e64>;

class FMADModsPat<Instruction inst, SDPatternOperator mad_opr> : Pat<
  (f32 (mad_opr (VOP3Mods f32:$src0, i32:$src0_mod),
                (VOP3Mods f32:$src1, i32:$src1_mod),
                (VOP3Mods f32:$src2, i32:$src2_mod))),
  (inst $src0_mod, $src0, $src1_mod, $src1,
        $src2_mod, $src2, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : FMADModsPat<V_MAD_F32, AMDGPUfmad_ftz>;

multiclass SelectPat <ValueType vt, Instruction inst> {
  def : Pat <
    (vt (select i1:$src0, vt:$src1, vt:$src2)),
    (inst $src2, $src1, $src0)
  >;
}

defm : SelectPat <i16, V_CNDMASK_B32_e64>;
defm : SelectPat <i32, V_CNDMASK_B32_e64>;
defm : SelectPat <f16, V_CNDMASK_B32_e64>;
defm : SelectPat <f32, V_CNDMASK_B32_e64>;
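
// Note the operand swap in SelectPat: v_cndmask_b32 returns its second source
// when the condition bit is set, so the select's "false" value goes in src0,
// the "true" value in src1, and the i1 condition last.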

def : Pat <
  (i32 (add (i32 (ctpop i32:$popcnt)), i32:$val)),
  (V_BCNT_U32_B32_e64 $popcnt, $val)
>;

/********** ============================================ **********/
/********** Extraction, Insertion, Building and Casting **********/
/********** ============================================ **********/

foreach Index = 0-2 in {
  def Extract_Element_v2i32_#Index : Extract_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2i32_#Index : Insert_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v2f32_#Index : Extract_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2f32_#Index : Insert_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}
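
// Each iteration of these foreach loops stamps out one pattern per vector
// lane; e.g. the Index = 0 case defines Extract_Element_v2i32_0, which reads
// lane 0 of a v2i32 through the sub0 subregister index.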

foreach Index = 0-3 in {
  def Extract_Element_v4i32_#Index : Extract_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4i32_#Index : Insert_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v4f32_#Index : Extract_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4f32_#Index : Insert_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-7 in {
  def Extract_Element_v8i32_#Index : Extract_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8i32_#Index : Insert_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v8f32_#Index : Extract_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8f32_#Index : Insert_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-15 in {
  def Extract_Element_v16i32_#Index : Extract_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16i32_#Index : Insert_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v16f32_#Index : Extract_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16f32_#Index : Insert_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

// FIXME: Why are only some of these type combinations defined for SReg and
// VReg?
// 16-bit bitcast
def : BitConvert <i16, f16, VGPR_32>;
def : BitConvert <f16, i16, VGPR_32>;
def : BitConvert <i16, f16, SReg_32>;
def : BitConvert <f16, i16, SReg_32>;

// 32-bit bitcast
def : BitConvert <i32, f32, VGPR_32>;
def : BitConvert <f32, i32, VGPR_32>;
def : BitConvert <i32, f32, SReg_32>;
def : BitConvert <f32, i32, SReg_32>;
def : BitConvert <v2i16, i32, SReg_32>;
def : BitConvert <i32, v2i16, SReg_32>;
def : BitConvert <v2f16, i32, SReg_32>;
def : BitConvert <i32, v2f16, SReg_32>;
def : BitConvert <v2i16, v2f16, SReg_32>;
def : BitConvert <v2f16, v2i16, SReg_32>;
def : BitConvert <v2f16, f32, SReg_32>;
def : BitConvert <f32, v2f16, SReg_32>;
def : BitConvert <v2i16, f32, SReg_32>;
def : BitConvert <f32, v2i16, SReg_32>;

// 64-bit bitcast
def : BitConvert <i64, f64, VReg_64>;
def : BitConvert <f64, i64, VReg_64>;
def : BitConvert <v2i32, v2f32, VReg_64>;
def : BitConvert <v2f32, v2i32, VReg_64>;
def : BitConvert <i64, v2i32, VReg_64>;
def : BitConvert <v2i32, i64, VReg_64>;
def : BitConvert <i64, v2f32, VReg_64>;
def : BitConvert <v2f32, i64, VReg_64>;
def : BitConvert <f64, v2f32, VReg_64>;
def : BitConvert <v2f32, f64, VReg_64>;
def : BitConvert <f64, v2i32, VReg_64>;
def : BitConvert <v2i32, f64, VReg_64>;
def : BitConvert <v4i32, v4f32, VReg_128>;
def : BitConvert <v4f32, v4i32, VReg_128>;

// 128-bit bitcast
def : BitConvert <v2i64, v4i32, SReg_128>;
def : BitConvert <v4i32, v2i64, SReg_128>;
def : BitConvert <v2f64, v4f32, VReg_128>;
def : BitConvert <v2f64, v4i32, VReg_128>;
def : BitConvert <v4f32, v2f64, VReg_128>;
def : BitConvert <v4i32, v2f64, VReg_128>;
def : BitConvert <v2i64, v2f64, VReg_128>;
def : BitConvert <v2f64, v2i64, VReg_128>;

// 256-bit bitcast
def : BitConvert <v8i32, v8f32, SReg_256>;
def : BitConvert <v8f32, v8i32, SReg_256>;
def : BitConvert <v8i32, v8f32, VReg_256>;
def : BitConvert <v8f32, v8i32, VReg_256>;

// 512-bit bitcast
def : BitConvert <v16i32, v16f32, VReg_512>;
def : BitConvert <v16f32, v16i32, VReg_512>;

/********** =================== **********/
/********** Src & Dst modifiers **********/
/********** =================== **********/

// If denormals are not enabled, it only impacts the compare of the
// inputs. The output result is not flushed.
class ClampPat<Instruction inst, ValueType vt> : Pat <
  (vt (AMDGPUclamp
    (VOP3Mods0Clamp vt:$src0, i32:$src0_modifiers, i32:$omod))),
  (inst i32:$src0_modifiers, vt:$src0,
        i32:$src0_modifiers, vt:$src0, DSTCLAMP.ENABLE, $omod)
>;

def : ClampPat<V_MAX_F32_e64, f32>;
def : ClampPat<V_MAX_F64, f64>;
def : ClampPat<V_MAX_F16_e64, f16>;
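
// A clamp of x is therefore selected as a max of x with itself (e.g.
// v_max_f32 x, x) with the destination clamp bit enabled, rather than as a
// separate min/max pair.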

/********** ================================ **********/
/********** Floating point absolute/negative **********/
/********** ================================ **********/

// Prevent expanding both fneg and fabs.

def : Pat <
  (fneg (fabs f32:$src)),
  (S_OR_B32 $src, (S_MOV_B32 (i32 0x80000000))) // Set sign bit
>;

// FIXME: Should use S_OR_B32
def : Pat <
  (fneg (fabs f64:$src)),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_OR_B32_e32 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                  (V_MOV_B32_e32 (i32 0x80000000))), // Set sign bit.
    sub1)
>;
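
// For the 64-bit cases here and below, only the high half (sub1) contains the
// sign bit, so the low 32 bits are passed through untouched and the bit
// operation is applied to sub1 alone before REG_SEQUENCE reassembles the pair.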

def : Pat <
  (fabs f32:$src),
  (V_AND_B32_e64 $src, (V_MOV_B32_e32 (i32 0x7fffffff)))
>;

def : Pat <
  (fneg f32:$src),
  (V_XOR_B32_e32 $src, (V_MOV_B32_e32 (i32 0x80000000)))
>;

def : Pat <
  (fabs f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_AND_B32_e64 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                   (V_MOV_B32_e32 (i32 0x7fffffff))), // Clear sign bit.
    sub1)
>;

def : Pat <
  (fneg f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_XOR_B32_e32 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                   (i32 (V_MOV_B32_e32 (i32 0x80000000)))),
    sub1)
>;

def : Pat <
  (fcopysign f16:$src0, f16:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0, $src1)
>;
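
// v_bfi_b32 computes (mask & a) | (~mask & b), so with a 0x7fff mask the
// magnitude bits of the result come from $src0 and the sign (plus the unused
// upper bits) come from $src1, which gives fcopysign semantics for f16 values.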
| 735 | |
| 736 | def : Pat < |
| 737 | (fcopysign f32:$src0, f16:$src1), |
| 738 | (V_BFI_B32 (S_MOV_B32 (i32 0x7fffffff)), $src0, |
| 739 | (V_LSHLREV_B32_e64 (i32 16), $src1)) |
| 740 | >; |
| 741 | |
| 742 | def : Pat < |
| 743 | (fcopysign f64:$src0, f16:$src1), |
| 744 | (REG_SEQUENCE SReg_64, |
| 745 | (i32 (EXTRACT_SUBREG $src0, sub0)), sub0, |
| 746 | (V_BFI_B32 (S_MOV_B32 (i32 0x7fffffff)), (i32 (EXTRACT_SUBREG $src0, sub1)), |
| 747 | (V_LSHLREV_B32_e64 (i32 16), $src1)), sub1) |
| 748 | >; |
| 749 | |
| 750 | def : Pat < |
| 751 | (fcopysign f16:$src0, f32:$src1), |
| 752 | (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0, |
| 753 | (V_LSHRREV_B32_e64 (i32 16), $src1)) |
| 754 | >; |
| 755 | |
| 756 | def : Pat < |
| 757 | (fcopysign f16:$src0, f64:$src1), |
| 758 | (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0, |
| 759 | (V_LSHRREV_B32_e64 (i32 16), (EXTRACT_SUBREG $src1, sub1))) |
| 760 | >; |
| 761 | |
| 762 | def : Pat < |
| Matt Arsenault | c79dc70 | 2016-11-15 02:25:28 +0000 | [diff] [blame] | 763 | (fneg f16:$src), |
| 764 | (V_XOR_B32_e32 $src, (V_MOV_B32_e32 (i32 0x00008000))) |
| 765 | >; |
| 766 | |
| 767 | def : Pat < |
| 768 | (fabs f16:$src), |
| 769 | (V_AND_B32_e64 $src, (V_MOV_B32_e32 (i32 0x00007fff))) |
| 770 | >; |
| 771 | |
| 772 | def : Pat < |
| 773 | (fneg (fabs f16:$src)), |
| 774 | (S_OR_B32 $src, (S_MOV_B32 (i32 0x00008000))) // Set sign bit |
| 775 | >; |
| 776 | |
| Matt Arsenault | eb522e6 | 2017-02-27 22:15:25 +0000 | [diff] [blame] | 777 | def : Pat < |
| 778 | (fneg v2f16:$src), |
| 779 | (V_XOR_B32_e64 (S_MOV_B32 (i32 0x80008000)), $src) |
| 780 | >; |
| 781 | |
| 782 | def : Pat < |
| 783 | (fabs v2f16:$src), |
| 784 | (V_AND_B32_e64 (S_MOV_B32 (i32 0x7fff7fff)), $src) |
| 785 | >; |
| 786 | |
| 787 | // This is really (fneg (fabs v2f16:$src)) |
| 788 | // |
| 789 | // fabs is not reported as free because there is a modifier for it in |
| 790 | // VOP3P instructions, so it is turned into the bit op. |
| 791 | def : Pat < |
| 792 | (fneg (v2f16 (bitconvert (and_oneuse i32:$src, 0x7fff7fff)))), |
| 793 | (S_OR_B32 (S_MOV_B32 (i32 0x80008000)), $src) // Set both sign bits |
| 794 | >; |
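// For illustration: a v2f16 value is two half-precision floats packed into
// one 32-bit register, with sign bits at bit 15 and bit 31, so the bit ops
// above act on both halves at once:
//   fabs(v)       -> v & 0x7fff7fff   (clear both sign bits)
//   fneg(v)       -> v ^ 0x80008000   (flip both sign bits)
//   fneg(fabs(v)) -> v | 0x80008000   (set both sign bits)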
| 795 | |
| Christian Konig | c756cb99 | 2013-02-16 11:28:22 +0000 | [diff] [blame] | 796 | /********** ================== **********/ |
| 797 | /********** Immediate Patterns **********/ |
| 798 | /********** ================== **********/ |
| 799 | |
| 800 | def : Pat < |
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 801 | (VGPRImm<(i32 imm)>:$imm), |
| Christian Konig | c756cb99 | 2013-02-16 11:28:22 +0000 | [diff] [blame] | 802 | (V_MOV_B32_e32 imm:$imm) |
| 803 | >; |
| 804 | |
| 805 | def : Pat < |
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 806 | (VGPRImm<(f32 fpimm)>:$imm), |
| Tom Stellard | fb77f00 | 2015-01-13 22:59:41 +0000 | [diff] [blame] | 807 | (V_MOV_B32_e32 (f32 (bitcast_fpimm_to_i32 $imm))) |
| Christian Konig | c756cb99 | 2013-02-16 11:28:22 +0000 | [diff] [blame] | 808 | >; |
| 809 | |
| 810 | def : Pat < |
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 811 | (i32 imm:$imm), |
| 812 | (S_MOV_B32 imm:$imm) |
| 813 | >; |
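// VGPRImm (a predicate defined elsewhere in the backend) is understood here
// to fire when the immediate's users want the value in a VGPR, in which case
// it is materialized with v_mov_b32; otherwise the plain imm patterns fall
// through to s_mov_b32.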
| 814 | |
| Matt Arsenault | e96d037 | 2016-12-08 20:14:46 +0000 | [diff] [blame] | 815 | // FIXME: Workaround for an ordering issue with the peephole optimizer, where |
| 816 | // a register-class copy interferes with immediate folding. This should |
| 817 | // use s_mov_b32, which can be shrunk to s_movk_i32. |
| 818 | def : Pat < |
| 819 | (VGPRImm<(f16 fpimm)>:$imm), |
| 820 | (V_MOV_B32_e32 (f16 (bitcast_fpimm_to_i32 $imm))) |
| 821 | >; |
| 822 | |
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 823 | def : Pat < |
| 824 | (f32 fpimm:$imm), |
| 825 | (S_MOV_B32 (f32 (bitcast_fpimm_to_i32 $imm))) |
| 826 | >; |
| 827 | |
| 828 | def : Pat < |
| Matt Arsenault | e96d037 | 2016-12-08 20:14:46 +0000 | [diff] [blame] | 829 | (f16 fpimm:$imm), |
| 830 | (S_MOV_B32 (i32 (bitcast_fpimm_to_i32 $imm))) |
| 831 | >; |
| 832 | |
| 833 | def : Pat < |
| Matt Arsenault | ac0fc84 | 2016-09-17 16:09:55 +0000 | [diff] [blame] | 834 | (i32 frameindex:$fi), |
| 835 | (V_MOV_B32_e32 (i32 (frameindex_to_targetframeindex $fi))) |
| 836 | >; |
| 837 | |
| 838 | def : Pat < |
| Christian Konig | b559b07 | 2013-02-16 11:28:36 +0000 | [diff] [blame] | 839 | (i64 InlineImm<i64>:$imm), |
| 840 | (S_MOV_B64 InlineImm<i64>:$imm) |
| 841 | >; |
| 842 | |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 843 | // XXX - Should this use an s_cmp to set SCC? |
| 844 | |
| 845 | // Set to sign-extended 64-bit value (true = -1, false = 0) |
| 846 | def : Pat < |
| 847 | (i1 imm:$imm), |
| 848 | (S_MOV_B64 (i64 (as_i64imm $imm))) |
| 849 | >; |
| 850 | |
| Matt Arsenault | 303011a | 2014-12-17 21:04:08 +0000 | [diff] [blame] | 851 | def : Pat < |
| 852 | (f64 InlineFPImm<f64>:$imm), |
| Tom Stellard | fb77f00 | 2015-01-13 22:59:41 +0000 | [diff] [blame] | 853 | (S_MOV_B64 (f64 (bitcast_fpimm_to_i64 InlineFPImm<f64>:$imm))) |
| Matt Arsenault | 303011a | 2014-12-17 21:04:08 +0000 | [diff] [blame] | 854 | >; |
| 855 | |
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 856 | /********** ================== **********/ |
| 857 | /********** Intrinsic Patterns **********/ |
| 858 | /********** ================== **********/ |
| 859 | |
| Tom Stellard | 40b7f1f | 2013-05-02 15:30:12 +0000 | [diff] [blame] | 860 | def : POW_Common <V_LOG_F32_e32, V_EXP_F32_e32, V_MUL_LEGACY_F32_e32>; |
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 861 | |
| 862 | def : Pat < |
| Tom Stellard | 40b7f1f | 2013-05-02 15:30:12 +0000 | [diff] [blame] | 863 | (i32 (sext i1:$src0)), |
| 864 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src0) |
| Michel Danzer | 0cc991e | 2013-02-22 11:22:58 +0000 | [diff] [blame] | 865 | >; |
| 866 | |
| Tom Stellard | f16d38c | 2014-02-13 23:34:13 +0000 | [diff] [blame] | 867 | class Ext32Pat <SDNode ext> : Pat < |
| 868 | (i32 (ext i1:$src0)), |
| Michel Danzer | 5d26fdf | 2014-02-05 09:48:05 +0000 | [diff] [blame] | 869 | (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src0) |
| 870 | >; |
| 871 | |
| Tom Stellard | f16d38c | 2014-02-13 23:34:13 +0000 | [diff] [blame] | 872 | def : Ext32Pat <zext>; |
| 873 | def : Ext32Pat <anyext>; |
| 874 | |
| Michel Danzer | 8caa904 | 2013-04-10 17:17:56 +0000 | [diff] [blame] | 875 | // The multiplication scales the [0,1] reciprocal up to the full unsigned 32-bit integer range. |
| 876 | def : Pat < |
| 877 | (AMDGPUurecip i32:$src0), |
| 878 | (V_CVT_U32_F32_e32 |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 879 | (V_MUL_F32_e32 (i32 CONST.FP_UINT_MAX_PLUS_1), |
| Michel Danzer | 8caa904 | 2013-04-10 17:17:56 +0000 | [diff] [blame] | 880 | (V_RCP_IFLAG_F32_e32 (V_CVT_F32_U32_e32 $src0)))) |
| 881 | >; |
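// A worked example, assuming CONST.FP_UINT_MAX_PLUS_1 encodes 4294967296.0
// (0x4f800000), with $src0 = 16:
//   V_CVT_F32_U32   16                      -> 16.0
//   V_RCP_IFLAG_F32 16.0                    -> 0.0625
//   V_MUL_F32       4294967296.0 * 0.0625   -> 268435456.0
//   V_CVT_U32_F32   268435456.0             -> 0x10000000 == 2^32 / 16
// i.e. AMDGPUurecip approximates 2^32 / x.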
| 882 | |
| Tom Stellard | 0289ff4 | 2014-05-16 20:56:44 +0000 | [diff] [blame] | 883 | //===----------------------------------------------------------------------===// |
| 884 | // VOP3 Patterns |
| 885 | //===----------------------------------------------------------------------===// |
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 886 | |
| Matt Arsenault | eb26020 | 2014-05-22 18:00:15 +0000 | [diff] [blame] | 887 | def : IMad24Pat<V_MAD_I32_I24>; |
| 888 | def : UMad24Pat<V_MAD_U32_U24>; |
| 889 | |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 890 | defm : BFIPatterns <V_BFI_B32, S_MOV_B32, SReg_64>; |
| Tom Stellard | 0289ff4 | 2014-05-16 20:56:44 +0000 | [diff] [blame] | 891 | def : ROTRPattern <V_ALIGNBIT_B32>; |
| 892 | |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 893 | /********** ====================== **********/ |
| Simon Pilgrim | e995a808 | 2016-11-18 11:04:02 +0000 | [diff] [blame] | 894 | /********** Indirect addressing **********/ |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 895 | /********** ====================== **********/ |
| 896 | |
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 897 | multiclass SI_INDIRECT_Pattern <ValueType vt, ValueType eltvt, string VecSize> { |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 898 | // Extract with offset |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 899 | def : Pat< |
| Nicolai Haehnle | 7968c34 | 2016-07-12 08:12:16 +0000 | [diff] [blame] | 900 | (eltvt (extractelt vt:$src, (MOVRELOffset i32:$idx, (i32 imm:$offset)))), |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 901 | (!cast<Instruction>("SI_INDIRECT_SRC_"#VecSize) $src, $idx, imm:$offset) |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 902 | >; |
| 903 | |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 904 | // Insert with offset |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 905 | def : Pat< |
| Nicolai Haehnle | 7968c34 | 2016-07-12 08:12:16 +0000 | [diff] [blame] | 906 | (insertelt vt:$src, eltvt:$val, (MOVRELOffset i32:$idx, (i32 imm:$offset))), |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 907 | (!cast<Instruction>("SI_INDIRECT_DST_"#VecSize) $src, $idx, imm:$offset, $val) |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 908 | >; |
| 909 | } |
| 910 | |
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 911 | defm : SI_INDIRECT_Pattern <v2f32, f32, "V2">; |
| 912 | defm : SI_INDIRECT_Pattern <v4f32, f32, "V4">; |
| 913 | defm : SI_INDIRECT_Pattern <v8f32, f32, "V8">; |
| 914 | defm : SI_INDIRECT_Pattern <v16f32, f32, "V16">; |
| Matt Arsenault | f5958dd | 2014-02-02 00:05:35 +0000 | [diff] [blame] | 915 | |
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 916 | defm : SI_INDIRECT_Pattern <v2i32, i32, "V2">; |
| 917 | defm : SI_INDIRECT_Pattern <v4i32, i32, "V4">; |
| 918 | defm : SI_INDIRECT_Pattern <v8i32, i32, "V8">; |
| 919 | defm : SI_INDIRECT_Pattern <v16i32, i32, "V16">; |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 920 | |
| Tom Stellard | 81d871d | 2013-11-13 23:36:50 +0000 | [diff] [blame] | 921 | //===----------------------------------------------------------------------===// |
| Wei Ding | 1041a64 | 2016-08-24 14:59:47 +0000 | [diff] [blame] | 922 | // SAD Patterns |
| 923 | //===----------------------------------------------------------------------===// |
| 924 | |
| 925 | def : Pat < |
| 926 | (add (sub_oneuse (umax i32:$src0, i32:$src1), |
| 927 | (umin i32:$src0, i32:$src1)), |
| 928 | i32:$src2), |
| 929 | (V_SAD_U32 $src0, $src1, $src2) |
| 930 | >; |
| 931 | |
| 932 | def : Pat < |
| 933 | (add (select_oneuse (i1 (setugt i32:$src0, i32:$src1)), |
| 934 | (sub i32:$src0, i32:$src1), |
| 935 | (sub i32:$src1, i32:$src0)), |
| 936 | i32:$src2), |
| 937 | (V_SAD_U32 $src0, $src1, $src2) |
| 938 | >; |
| 939 | |
| 940 | //===----------------------------------------------------------------------===// |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 941 | // Conversion Patterns |
| 942 | //===----------------------------------------------------------------------===// |
| 943 | |
| 944 | def : Pat<(i32 (sext_inreg i32:$src, i1)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 945 | (S_BFE_I32 i32:$src, (i32 65536))>; // 0 | 1 << 16 |
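// The S_BFE source-1 immediate is assumed to pack the field as
// (width << 16) | offset, so 65536 (0x10000) above means offset 0, width 1.
// A sketch of the decode under that assumption:
//   offset = imm & 0x3f;  width = (imm >> 16) & 0x7f;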
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 946 | |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 947 | // Handle sext_inreg in i64 |
| 948 | def : Pat < |
| 949 | (i64 (sext_inreg i64:$src, i1)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 950 | (S_BFE_I64 i64:$src, (i32 0x10000)) // 0 | 1 << 16 |
| 951 | >; |
| 952 | |
| 953 | def : Pat < |
| Matt Arsenault | 682eb43 | 2017-01-11 22:35:22 +0000 | [diff] [blame] | 954 | (i16 (sext_inreg i16:$src, i1)), |
| 955 | (S_BFE_I32 $src, (i32 0x00010000)) // 0 | 1 << 16 |
| 956 | >; |
| 957 | |
| 958 | def : Pat < |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 959 | (i16 (sext_inreg i16:$src, i8)), |
| 960 | (S_BFE_I32 $src, (i32 0x80000)) // 0 | 8 << 16 |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 961 | >; |
| 962 | |
| 963 | def : Pat < |
| 964 | (i64 (sext_inreg i64:$src, i8)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 965 | (S_BFE_I64 i64:$src, (i32 0x80000)) // 0 | 8 << 16 |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 966 | >; |
| 967 | |
| 968 | def : Pat < |
| 969 | (i64 (sext_inreg i64:$src, i16)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 970 | (S_BFE_I64 i64:$src, (i32 0x100000)) // 0 | 16 << 16 |
| Matt Arsenault | 9481221 | 2014-11-14 18:18:16 +0000 | [diff] [blame] | 971 | >; |
| 972 | |
| 973 | def : Pat < |
| 974 | (i64 (sext_inreg i64:$src, i32)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 975 | (S_BFE_I64 i64:$src, (i32 0x200000)) // 0 | 32 << 16 |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 976 | >; |
| 977 | |
| Matt Arsenault | c6b69a9 | 2016-07-26 23:06:33 +0000 | [diff] [blame] | 978 | def : Pat < |
| 979 | (i64 (zext i32:$src)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 980 | (REG_SEQUENCE SReg_64, $src, sub0, (S_MOV_B32 (i32 0)), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 981 | >; |
| 982 | |
| Matt Arsenault | c6b69a9 | 2016-07-26 23:06:33 +0000 | [diff] [blame] | 983 | def : Pat < |
| 984 | (i64 (anyext i32:$src)), |
| 985 | (REG_SEQUENCE SReg_64, $src, sub0, (i32 (IMPLICIT_DEF)), sub1) |
| 986 | >; |
| 987 | |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 988 | class ZExt_i64_i1_Pat <SDNode ext> : Pat < |
| 989 | (i64 (ext i1:$src)), |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 990 | (REG_SEQUENCE VReg_64, |
| 991 | (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src), sub0, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 992 | (S_MOV_B32 (i32 0)), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 993 | >; |
| 994 | |
| 995 | |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 996 | def : ZExt_i64_i1_Pat<zext>; |
| 997 | def : ZExt_i64_i1_Pat<anyext>; |
| 998 | |
| Tom Stellard | bc4497b | 2016-02-12 23:45:29 +0000 | [diff] [blame] | 999 | // FIXME: We need to use COPY_TO_REGCLASS to work around the fact that |
| 1000 | // REG_SEQUENCE patterns don't support instructions with multiple outputs. |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 1001 | def : Pat < |
| 1002 | (i64 (sext i32:$src)), |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 1003 | (REG_SEQUENCE SReg_64, $src, sub0, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1004 | (i32 (COPY_TO_REGCLASS (S_ASHR_I32 $src, (i32 31)), SReg_32_XM0)), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 1005 | >; |
| 1006 | |
| 1007 | def : Pat < |
| 1008 | (i64 (sext i1:$src)), |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 1009 | (REG_SEQUENCE VReg_64, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1010 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src), sub0, |
| 1011 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 1012 | >; |
| 1013 | |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1014 | class FPToI1Pat<Instruction Inst, int KOne, ValueType kone_type, ValueType vt, SDPatternOperator fp_to_int> : Pat < |
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 1015 | (i1 (fp_to_int (vt (VOP3Mods vt:$src0, i32:$src0_modifiers)))), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1016 | (i1 (Inst 0, (kone_type KOne), $src0_modifiers, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)) |
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 1017 | >; |
| 1018 | |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1019 | def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_ONE, i32, f32, fp_to_uint>; |
| 1020 | def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_NEG_ONE, i32, f32, fp_to_sint>; |
| 1021 | def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_ONE, i64, f64, fp_to_uint>; |
| 1022 | def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_NEG_ONE, i64, f64, fp_to_sint>; |
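// These patterns appear to assume the FP source is itself a canonical
// boolean (0.0 or +/-1.0), so converting back to i1 reduces to an equality
// compare against KOne: 1.0 for the unsigned case, -1.0 for the signed case.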
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 1023 | |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1024 | // If we need to perform a logical operation on i1 values, we need to |
| 1025 | // use vector comparisons since there is only one SCC register. Vector |
| Simon Pilgrim | e995a808 | 2016-11-18 11:04:02 +0000 | [diff] [blame] | 1026 | // comparisons still write to a pair of SGPRs, so treat these as |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1027 | // 64-bit comparisons. When legalizing SGPR copies, the instructions that |
| 1028 | // produce the copies from SCC into these operations will be |
| 1029 | // moved to the VALU. |
| 1030 | def : Pat < |
| 1031 | (i1 (and i1:$src0, i1:$src1)), |
| 1032 | (S_AND_B64 $src0, $src1) |
| 1033 | >; |
| 1034 | |
| 1035 | def : Pat < |
| 1036 | (i1 (or i1:$src0, i1:$src1)), |
| 1037 | (S_OR_B64 $src0, $src1) |
| 1038 | >; |
| 1039 | |
| 1040 | def : Pat < |
| 1041 | (i1 (xor i1:$src0, i1:$src1)), |
| 1042 | (S_XOR_B64 $src0, $src1) |
| 1043 | >; |
| 1044 | |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1045 | def : Pat < |
| 1046 | (f32 (sint_to_fp i1:$src)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1047 | (V_CNDMASK_B32_e64 (i32 0), (i32 CONST.FP32_NEG_ONE), $src) |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1048 | >; |
| 1049 | |
| 1050 | def : Pat < |
| 1051 | (f32 (uint_to_fp i1:$src)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1052 | (V_CNDMASK_B32_e64 (i32 0), (i32 CONST.FP32_ONE), $src) |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1053 | >; |
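// Assuming CONST.FP32_ONE and CONST.FP32_NEG_ONE carry the usual encodings
// of 1.0f and -1.0f (0x3f800000 and 0xbf800000), the selects above
// materialize 0.0 or +/-1.0 directly, while the f64 cases below go through
// an integer-to-double conversion instead.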
| 1054 | |
| 1055 | def : Pat < |
| 1056 | (f64 (sint_to_fp i1:$src)), |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1057 | (V_CVT_F64_I32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src)) |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1058 | >; |
| 1059 | |
| 1060 | def : Pat < |
| 1061 | (f64 (uint_to_fp i1:$src)), |
| 1062 | (V_CVT_F64_U32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src)) |
| 1063 | >; |
| 1064 | |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 1065 | //===----------------------------------------------------------------------===// |
| Tom Stellard | fb96169 | 2013-10-23 00:44:19 +0000 | [diff] [blame] | 1066 | // Miscellaneous Patterns |
| 1067 | //===----------------------------------------------------------------------===// |
| 1068 | |
| 1069 | def : Pat < |
| Tom Stellard | 81d871d | 2013-11-13 23:36:50 +0000 | [diff] [blame] | 1070 | (i32 (trunc i64:$a)), |
| 1071 | (EXTRACT_SUBREG $a, sub0) |
| 1072 | >; |
| 1073 | |
| Michel Danzer | bf1a641 | 2014-01-28 03:01:16 +0000 | [diff] [blame] | 1074 | def : Pat < |
| 1075 | (i1 (trunc i32:$a)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1076 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), $a), (i32 1)) |
| Michel Danzer | bf1a641 | 2014-01-28 03:01:16 +0000 | [diff] [blame] | 1077 | >; |
| 1078 | |
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1079 | def : Pat < |
| Jan Vesely | 70293a0 | 2017-02-23 16:12:21 +0000 | [diff] [blame] | 1080 | (i1 (trunc i16:$a)), |
| 1081 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), $a), (i32 1)) |
| 1082 | >; |
| 1083 | |
| 1084 | def : Pat < |
| Matt Arsenault | abd271b | 2015-02-05 06:05:13 +0000 | [diff] [blame] | 1085 | (i1 (trunc i64:$a)), |
| Matt Arsenault | 5d8eb25 | 2016-09-30 01:50:20 +0000 | [diff] [blame] | 1086 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1087 | (i32 (EXTRACT_SUBREG $a, sub0))), (i32 1)) |
| Matt Arsenault | abd271b | 2015-02-05 06:05:13 +0000 | [diff] [blame] | 1088 | >; |
| 1089 | |
| 1090 | def : Pat < |
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1091 | (i32 (bswap i32:$a)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1092 | (V_BFI_B32 (S_MOV_B32 (i32 0x00ff00ff)), |
| 1093 | (V_ALIGNBIT_B32 $a, $a, (i32 24)), |
| 1094 | (V_ALIGNBIT_B32 $a, $a, (i32 8))) |
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1095 | >; |
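// How the expansion works, writing $a as bytes (b3,b2,b1,b0) from MSB to LSB:
//   V_ALIGNBIT_B32 $a, $a, 24   ; rotate right by 24 -> (b2,b1,b0,b3)
//   V_ALIGNBIT_B32 $a, $a, 8    ; rotate right by 8  -> (b0,b3,b2,b1)
//   V_BFI_B32 0x00ff00ff, X, Y  ; (X & 0x00ff00ff) | (Y & 0xff00ff00)
// keeps bytes 0 and 2 of the first rotate and bytes 1 and 3 of the second,
// giving (b0,b1,b2,b3), the byte-swapped value.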
| 1096 | |
| Marek Olsak | 63a7b08 | 2015-03-24 13:40:21 +0000 | [diff] [blame] | 1097 | multiclass BFMPatterns <ValueType vt, InstSI BFM, InstSI MOV> { |
| 1098 | def : Pat < |
| 1099 | (vt (shl (vt (add (vt (shl 1, vt:$a)), -1)), vt:$b)), |
| 1100 | (BFM $a, $b) |
| 1101 | >; |
| 1102 | |
| 1103 | def : Pat < |
| 1104 | (vt (add (vt (shl 1, vt:$a)), -1)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1105 | (BFM $a, (MOV (i32 0))) |
| Marek Olsak | 63a7b08 | 2015-03-24 13:40:21 +0000 | [diff] [blame] | 1106 | >; |
| 1107 | } |
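// For reference: S_BFM_B32 $a, $b is understood to build a mask of $a
// consecutive ones starting at bit $b, i.e. ((1 << a) - 1) << b, which is
// the DAG shape matched above; the second pattern is the offset-0 case.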
| 1108 | |
| 1109 | defm : BFMPatterns <i32, S_BFM_B32, S_MOV_B32>; |
| 1110 | // FIXME: defm : BFMPatterns <i64, S_BFM_B64, S_MOV_B64>; |
| Matt Arsenault | a9e16e6 | 2017-02-23 00:23:43 +0000 | [diff] [blame] | 1111 | defm : BFEPattern <V_BFE_U32, V_BFE_I32, S_MOV_B32>; |
| Marek Olsak | 949f5da | 2015-03-24 13:40:34 +0000 | [diff] [blame] | 1112 | |
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1113 | def : Pat< |
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame] | 1114 | (fcanonicalize (f16 (VOP3Mods f16:$src, i32:$src_mods))), |
| 1115 | (V_MUL_F16_e64 0, (i32 CONST.FP16_ONE), $src_mods, $src, 0, 0) |
| Matt Arsenault | ce84130 | 2016-12-22 03:05:37 +0000 | [diff] [blame] | 1116 | >; |
| 1117 | |
| 1118 | def : Pat< |
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame] | 1119 | (fcanonicalize (f32 (VOP3Mods f32:$src, i32:$src_mods))), |
| 1120 | (V_MUL_F32_e64 0, (i32 CONST.FP32_ONE), $src_mods, $src, 0, 0) |
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1121 | >; |
| 1122 | |
| 1123 | def : Pat< |
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame] | 1124 | (fcanonicalize (f64 (VOP3Mods f64:$src, i32:$src_mods))), |
| 1125 | (V_MUL_F64 0, CONST.FP64_ONE, $src_mods, $src, 0, 0) |
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1126 | >; |
| 1127 | |
| Matt Arsenault | eb522e6 | 2017-02-27 22:15:25 +0000 | [diff] [blame] | 1128 | def : Pat< |
| 1129 | (fcanonicalize (v2f16 (VOP3PMods v2f16:$src, i32:$src_mods))), |
| 1130 | (V_PK_MUL_F16 SRCMODS.OP_SEL_1, (i32 CONST.V2FP16_ONE), $src_mods, $src, DSTCLAMP.NONE) |
| 1131 | >; |
| 1132 | |
| 1133 | |
| Matt Arsenault | 4165efd | 2017-01-17 07:26:53 +0000 | [diff] [blame] | 1134 | // Allow integer inputs |
| 1135 | class ExpPattern<SDPatternOperator node, ValueType vt, Instruction Inst> : Pat< |
| 1136 | (node (i8 timm:$tgt), (i8 timm:$en), vt:$src0, vt:$src1, vt:$src2, vt:$src3, (i1 timm:$compr), (i1 timm:$vm)), |
| 1137 | (Inst i8:$tgt, vt:$src0, vt:$src1, vt:$src2, vt:$src3, i1:$vm, i1:$compr, i8:$en) |
| 1138 | >; |
| 1139 | |
| 1140 | def : ExpPattern<AMDGPUexport, i32, EXP>; |
| 1141 | def : ExpPattern<AMDGPUexport_done, i32, EXP_DONE>; |
| 1142 | |
| Matt Arsenault | eb522e6 | 2017-02-27 22:15:25 +0000 | [diff] [blame] | 1143 | def : Pat < |
| 1144 | (v2i16 (build_vector i16:$src0, i16:$src1)), |
| 1145 | (v2i16 (S_PACK_LL_B32_B16 $src0, $src1)) |
| 1146 | >; |
| 1147 | |
| 1148 | // With multiple uses of the shift, this will duplicate the shift and |
| 1149 | // increase register pressure. |
| 1150 | def : Pat < |
| 1151 | (v2i16 (build_vector i16:$src0, (i16 (trunc (srl_oneuse i32:$src1, (i32 16)))))), |
| 1152 | (v2i16 (S_PACK_LH_B32_B16 i16:$src0, i32:$src1)) |
| 1153 | >; |
| 1154 | |
| 1155 | def : Pat < |
| 1156 | (v2i16 (build_vector (i16 (trunc (srl_oneuse i32:$src0, (i32 16)))), |
| 1157 | (i16 (trunc (srl_oneuse i32:$src1, (i32 16)))))), |
| 1158 | (v2i16 (S_PACK_HH_B32_B16 $src0, $src1)) |
| 1159 | >; |
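// Assuming the usual S_PACK_*_B32_B16 semantics, the three variants differ
// only in which half of each source lands in the result:
//   S_PACK_LL_B32_B16 s0, s1 -> (s1[15:0]  << 16) | s0[15:0]
//   S_PACK_LH_B32_B16 s0, s1 -> (s1[31:16] << 16) | s0[15:0]
//   S_PACK_HH_B32_B16 s0, s1 -> (s1[31:16] << 16) | s0[31:16]
// which is why the LH/HH patterns above can fold the 16-bit shift away.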
| 1160 | |
| 1161 | // TODO: Should source modifiers be matched to v_pack_b32_f16? |
| 1162 | def : Pat < |
| 1163 | (v2f16 (build_vector f16:$src0, f16:$src1)), |
| 1164 | (v2f16 (S_PACK_LL_B32_B16 $src0, $src1)) |
| 1165 | >; |
| 1166 | |
| 1167 | // def : Pat < |
| 1168 | // (v2f16 (scalar_to_vector f16:$src0)), |
| 1169 | // (COPY $src0) |
| 1170 | // >; |
| 1171 | |
| 1172 | // def : Pat < |
| 1173 | // (v2i16 (scalar_to_vector i16:$src0)), |
| 1174 | // (COPY $src0) |
| 1175 | // >; |
| 1176 | |
| Marek Olsak | 43650e4 | 2015-03-24 13:40:08 +0000 | [diff] [blame] | 1177 | //===----------------------------------------------------------------------===// |
| 1178 | // Fract Patterns |
| 1179 | //===----------------------------------------------------------------------===// |
| 1180 | |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1181 | let Predicates = [isSI] in { |
| 1182 | |
| 1183 | // V_FRACT is buggy on SI, so the F32 version is never used and (x-floor(x)) is |
| 1184 | // used instead. However, SI doesn't have V_FLOOR_F64, so the most efficient |
| 1185 | // way to implement it is using V_FRACT_F64. |
| 1186 | // The workaround for the V_FRACT bug is: |
| 1187 | // fract(x) = isnan(x) ? x : min(V_FRACT(x), 0x3fefffffffffffff /* largest double < 1.0 */) |
| 1188 | |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1189 | // Convert floor(x) to (x - fract(x)) |
| 1190 | def : Pat < |
| 1191 | (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))), |
| 1192 | (V_ADD_F64 |
| 1193 | $mods, |
| 1194 | $x, |
| 1195 | SRCMODS.NEG, |
| 1196 | (V_CNDMASK_B64_PSEUDO |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1197 | (V_MIN_F64 |
| 1198 | SRCMODS.NONE, |
| 1199 | (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE), |
| 1200 | SRCMODS.NONE, |
| 1201 | (V_MOV_B64_PSEUDO 0x3fefffffffffffff), |
| 1202 | DSTCLAMP.NONE, DSTOMOD.NONE), |
| Marek Olsak | 1354b87 | 2015-07-27 11:37:42 +0000 | [diff] [blame] | 1203 | $x, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1204 | (V_CMP_CLASS_F64_e64 SRCMODS.NONE, $x, (i32 3 /*NaN*/))), |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1205 | DSTCLAMP.NONE, DSTOMOD.NONE) |
| 1206 | >; |
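// Expanded, for non-NaN inputs the pattern computes
//   floor(x) = x - min(V_FRACT(x), 0x3fefffffffffffff)
// clamping with the largest double below 1.0 to work around the V_FRACT bug,
// while V_CMP_CLASS_F64 with class mask 3 (signaling or quiet NaN) makes the
// select return x itself, so NaN inputs still produce a NaN result.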
| 1207 | |
| 1208 | } // End Predicates = [isSI] |
| 1209 | |
| Tom Stellard | fb96169 | 2013-10-23 00:44:19 +0000 | [diff] [blame] | 1210 | //============================================================================// |
| Tom Stellard | eac65dd | 2013-05-03 17:21:20 +0000 | [diff] [blame] | 1211 | // Miscellaneous Optimization Patterns |
| 1212 | //============================================================================// |
| 1213 | |
| Matt Arsenault | 10268f9 | 2017-02-27 22:40:39 +0000 | [diff] [blame] | 1214 | // Undo the sub x, c -> add x, -c canonicalization, since c is more likely |
| 1215 | // to be an inline immediate than -c. |
| 1216 | // TODO: Also do for 64-bit. |
| 1217 | def : Pat< |
| 1218 | (add i32:$src0, (i32 NegSubInlineConst32:$src1)), |
| 1219 | (S_SUB_I32 $src0, NegSubInlineConst32:$src1) |
| 1220 | >; |
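// For example, the DAG canonicalizes (sub x, 64) into (add x, -64); -64 is
// not an inline constant but 64 is (the integer inline range is -16..64),
// so this pattern turns it back into s_sub_i32 x, 64.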
| 1221 | |
| Matt Arsenault | 49dd428 | 2014-09-15 17:15:02 +0000 | [diff] [blame] | 1222 | def : SHA256MaPattern <V_BFI_B32, V_XOR_B32_e64>; |
| Tom Stellard | eac65dd | 2013-05-03 17:21:20 +0000 | [diff] [blame] | 1223 | |
| Matt Arsenault | c89f291 | 2016-03-07 21:54:48 +0000 | [diff] [blame] | 1224 | def : IntMed3Pat<V_MED3_I32, smax, smax_oneuse, smin_oneuse>; |
| 1225 | def : IntMed3Pat<V_MED3_U32, umax, umax_oneuse, umin_oneuse>; |
| 1226 | |
| Matt Arsenault | f84e5d9 | 2017-01-31 03:07:46 +0000 | [diff] [blame] | 1227 | // This matches 16 permutations of |
| 1228 | // max(min(x, y), min(max(x, y), z)) |
| 1229 | class FPMed3Pat<ValueType vt, |
| 1230 | Instruction med3Inst> : Pat< |
| 1231 | (fmaxnum (fminnum_oneuse (VOP3Mods_nnan vt:$src0, i32:$src0_mods), |
| 1232 | (VOP3Mods_nnan vt:$src1, i32:$src1_mods)), |
| 1233 | (fminnum_oneuse (fmaxnum_oneuse (VOP3Mods_nnan vt:$src0, i32:$src0_mods), |
| 1234 | (VOP3Mods_nnan vt:$src1, i32:$src1_mods)), |
| 1235 | (vt (VOP3Mods_nnan vt:$src2, i32:$src2_mods)))), |
| 1236 | (med3Inst $src0_mods, $src0, $src1_mods, $src1, $src2_mods, $src2, DSTCLAMP.NONE, DSTOMOD.NONE) |
| 1237 | >; |
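// A worked example with (x, y, z) = (1.0, 5.0, 3.0):
//   fmaxnum(fminnum(1, 5), fminnum(fmaxnum(1, 5), 3))
//     = fmaxnum(1, fminnum(5, 3)) = fmaxnum(1, 3) = 3.0
// i.e. the median of the three inputs. The 16 permutations come from
// commuting the operands of each of the four min/max nodes.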
| 1238 | |
| 1239 | def : FPMed3Pat<f32, V_MED3_F32>; |
| 1240 | |
| Matt Arsenault | 10268f9 | 2017-02-27 22:40:39 +0000 | [diff] [blame] | 1241 | let Predicates = [isGFX9] in { |
| 1242 | def : FPMed3Pat<f16, V_MED3_F16>; |
| 1243 | def : IntMed3Pat<V_MED3_I16, smax, smax_oneuse, smin_oneuse, i16>; |
| 1244 | def : IntMed3Pat<V_MED3_U16, umax, umax_oneuse, umin_oneuse, i16>; |
| 1245 | } // End Predicates = [isGFX9] |
| Matt Arsenault | af63524 | 2017-01-30 19:30:24 +0000 | [diff] [blame] | 1246 | |
| Tom Stellard | 245c15f | 2015-05-26 15:55:52 +0000 | [diff] [blame] | 1247 | //============================================================================// |
| 1248 | // Assembler aliases |
| 1249 | //============================================================================// |
| 1250 | |
| 1251 | def : MnemonicAlias<"v_add_u32", "v_add_i32">; |
| 1252 | def : MnemonicAlias<"v_sub_u32", "v_sub_i32">; |
| 1253 | def : MnemonicAlias<"v_subrev_u32", "v_subrev_i32">; |
| 1254 | |
| Marek Olsak | 5df00d6 | 2014-12-07 12:18:57 +0000 | [diff] [blame] | 1255 | } // End isGCN predicate |