//===-- SIInstructions.td - SI Instruction Definitions --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// This file was originally auto-generated from a GPU register header file and
// all the instruction definitions were originally commented out. Instructions
// that are not yet supported remain commented out.
//===----------------------------------------------------------------------===//

def isGCN : Predicate<"Subtarget->getGeneration() "
                      ">= SISubtarget::SOUTHERN_ISLANDS">,
            AssemblerPredicate<"FeatureGCN">;
def isSI : Predicate<"Subtarget->getGeneration() "
                     "== SISubtarget::SOUTHERN_ISLANDS">,
           AssemblerPredicate<"FeatureSouthernIslands">;

def has16BankLDS : Predicate<"Subtarget->getLDSBankCount() == 16">;
def has32BankLDS : Predicate<"Subtarget->getLDSBankCount() == 32">;
def HasVGPRIndexMode : Predicate<"Subtarget->hasVGPRIndexMode()">,
                       AssemblerPredicate<"FeatureVGPRIndexMode">;
def HasMovrel : Predicate<"Subtarget->hasMovrel()">,
                AssemblerPredicate<"FeatureMovrel">;

include "VOPInstructions.td"
include "SOPInstructions.td"
include "SMInstructions.td"
include "FLATInstructions.td"
include "BUFInstructions.td"

let SubtargetPredicate = isGCN in {

//===----------------------------------------------------------------------===//
// EXP Instructions
//===----------------------------------------------------------------------===//

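// The leading template argument appears to be the export "done" bit:
// EXP_DONE marks the shader's final export of a given target (e.g. the last
// position or color export), while EXP is an ordinary export.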
defm EXP : EXP_m<0, AMDGPUexport>;
defm EXP_DONE : EXP_m<1, AMDGPUexport_done>;

//===----------------------------------------------------------------------===//
// VINTRP Instructions
//===----------------------------------------------------------------------===//

let Uses = [M0, EXEC] in {

// FIXME: Specify SchedRW for VINTRP instructions.

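// Attribute interpolation is a two-step sequence: v_interp_p1_f32 computes
// P0 + i * P10 from the i barycentric coordinate in $vsrc, and v_interp_p2_f32
// (defined below) then accumulates j * P20 into that partial result.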
multiclass V_INTERP_P1_F32_m : VINTRP_m <
  0x00000000,
  (outs VGPR_32:$vdst),
  (ins VGPR_32:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_p1_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_p1 f32:$vsrc, (i32 imm:$attrchan),
                                    (i32 imm:$attr)))]
>;

let OtherPredicates = [has32BankLDS] in {

defm V_INTERP_P1_F32 : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has32BankLDS]

let OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $vdst", isAsmParserOnly=1 in {

defm V_INTERP_P1_F32_16bank : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $vdst", isAsmParserOnly=1

let DisableEncoding = "$src0", Constraints = "$src0 = $vdst" in {

defm V_INTERP_P2_F32 : VINTRP_m <
  0x00000001,
  (outs VGPR_32:$vdst),
  (ins VGPR_32:$src0, VGPR_32:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_p2_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_p2 f32:$src0, f32:$vsrc, (i32 imm:$attrchan),
                                    (i32 imm:$attr)))]>;

} // End DisableEncoding = "$src0", Constraints = "$src0 = $vdst"

defm V_INTERP_MOV_F32 : VINTRP_m <
  0x00000002,
  (outs VGPR_32:$vdst),
  (ins InterpSlot:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_mov_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_mov (i32 imm:$vsrc), (i32 imm:$attrchan),
                                     (i32 imm:$attr)))]>;

} // End Uses = [M0, EXEC]

//===----------------------------------------------------------------------===//
// Pseudo Instructions
//===----------------------------------------------------------------------===//

let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC] in {

// For use in patterns
def V_CNDMASK_B64_PSEUDO : VOP3Common <(outs VReg_64:$vdst),
  (ins VSrc_b64:$src0, VSrc_b64:$src1, SSrc_b64:$src2), "", []> {
  let isPseudo = 1;
  let isCodeGenOnly = 1;
  let usesCustomInserter = 1;
}

// 64-bit vector move instruction. This is mainly used by the SIFoldOperands
// pass to enable folding of inline immediates.
def V_MOV_B64_PSEUDO : VPseudoInstSI <(outs VReg_64:$vdst),
                                      (ins VSrc_b64:$src0)>;
} // End let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC]

def S_TRAP_PSEUDO : SPseudoInstSI <(outs), (ins i16imm:$simm16)> {
  let hasSideEffects = 1;
  let SALU = 1;
  let usesCustomInserter = 1;
}

let usesCustomInserter = 1, SALU = 1 in {
def GET_GROUPSTATICSIZE : PseudoInstSI <(outs SReg_32:$sdst), (ins),
  [(set SReg_32:$sdst, (int_amdgcn_groupstaticsize))]>;
} // End let usesCustomInserter = 1, SALU = 1

def S_MOV_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_XOR_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_ANDN2_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def WAVE_BARRIER : SPseudoInstSI<(outs), (ins),
  [(int_amdgcn_wave_barrier)]> {
  let SchedRW = [];
  let hasNoSchedulingInfo = 1;
  let hasSideEffects = 1;
  let mayLoad = 1;
  let mayStore = 1;
  let isBarrier = 1;
  let isConvergent = 1;
}

// SI pseudo instructions. These are used by the CFG structurizer pass
// and should be lowered to ISA instructions prior to codegen.

// Dummy terminator instruction to use after control flow instructions
// replaced with exec mask operations.
def SI_MASK_BRANCH : PseudoInstSI <
  (outs), (ins brtarget:$target)> {
  let isBranch = 0;
  let isTerminator = 1;
  let isBarrier = 0;
  let Uses = [EXEC];
  let SchedRW = [];
  let hasNoSchedulingInfo = 1;
}

let isTerminator = 1 in {

def SI_IF: CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, brtarget:$target),
  [(set i64:$dst, (int_amdgcn_if i1:$vcc, bb:$target))], 1, 1> {
  let Constraints = "";
  let Size = 12;
  let mayLoad = 1;
  let mayStore = 1;
  let hasSideEffects = 1;
}

def SI_ELSE : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src, brtarget:$target, i1imm:$execfix), [], 1, 1> {
  let Constraints = "$src = $dst";
  let Size = 12;
  let mayStore = 1;
  let mayLoad = 1;
  let hasSideEffects = 1;
}

def SI_LOOP : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved, brtarget:$target),
  [(int_amdgcn_loop i64:$saved, bb:$target)], 1, 1> {
  let Size = 8;
  let isBranch = 1;
  let hasSideEffects = 1;
  let mayLoad = 1;
  let mayStore = 1;
}

} // End isTerminator = 1

def SI_END_CF : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved),
  [(int_amdgcn_end_cf i64:$saved)], 1, 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
  let mayLoad = 1;
  let mayStore = 1;
  let hasSideEffects = 1;
}

def SI_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_break i64:$src))], 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_IF_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_if_break i1:$vcc, i64:$src))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_ELSE_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src0, SReg_64:$src1),
  [(set i64:$dst, (int_amdgcn_else_break i64:$src0, i64:$src1))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

let Uses = [EXEC], Defs = [EXEC,VCC] in {
def SI_KILL : PseudoInstSI <
  (outs), (ins VSrc_b32:$src),
  [(AMDGPUkill i32:$src)]> {
  let isConvergent = 1;
  let usesCustomInserter = 1;
}

def SI_KILL_TERMINATOR : SPseudoInstSI <
  (outs), (ins VSrc_b32:$src)> {
  let isTerminator = 1;
}

} // End Uses = [EXEC], Defs = [EXEC,VCC]

// Branch on undef scc. Used to avoid intermediate copy from
// IMPLICIT_DEF to SCC.
def SI_BR_UNDEF : SPseudoInstSI <(outs), (ins sopp_brtarget:$simm16)> {
  let isTerminator = 1;
  let usesCustomInserter = 1;
}

def SI_PS_LIVE : PseudoInstSI <
  (outs SReg_64:$dst), (ins),
  [(set i1:$dst, (int_amdgcn_ps_live))]> {
  let SALU = 1;
}

// Used as an isel pseudo to directly emit initialization with an
// s_mov_b32 rather than a copy of another initialized
// register. MachineCSE skips copies, and we don't want to have to
// fold operands before it runs.
def SI_INIT_M0 : SPseudoInstSI <(outs), (ins SSrc_b32:$src)> {
  let Defs = [M0];
  let usesCustomInserter = 1;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_RETURN : SPseudoInstSI <
  (outs), (ins variable_ops), [(AMDGPUreturn)]> {
  let isTerminator = 1;
  let isBarrier = 1;
  let isReturn = 1;
  let hasSideEffects = 1;
  let hasNoSchedulingInfo = 1;
  let DisableWQM = 1;
}

let Defs = [M0, EXEC],
    UseNamedOperandTable = 1 in {

class SI_INDIRECT_SRC<RegisterClass rc> : VPseudoInstSI <
  (outs VGPR_32:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset)> {
  let usesCustomInserter = 1;
}

class SI_INDIRECT_DST<RegisterClass rc> : VPseudoInstSI <
  (outs rc:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset, VGPR_32:$val)> {
  let Constraints = "$src = $vdst";
  let usesCustomInserter = 1;
}

// TODO: We can support indirect SGPR access.
def SI_INDIRECT_SRC_V1 : SI_INDIRECT_SRC<VGPR_32>;
def SI_INDIRECT_SRC_V2 : SI_INDIRECT_SRC<VReg_64>;
def SI_INDIRECT_SRC_V4 : SI_INDIRECT_SRC<VReg_128>;
def SI_INDIRECT_SRC_V8 : SI_INDIRECT_SRC<VReg_256>;
def SI_INDIRECT_SRC_V16 : SI_INDIRECT_SRC<VReg_512>;

def SI_INDIRECT_DST_V1 : SI_INDIRECT_DST<VGPR_32>;
def SI_INDIRECT_DST_V2 : SI_INDIRECT_DST<VReg_64>;
def SI_INDIRECT_DST_V4 : SI_INDIRECT_DST<VReg_128>;
def SI_INDIRECT_DST_V8 : SI_INDIRECT_DST<VReg_256>;
def SI_INDIRECT_DST_V16 : SI_INDIRECT_DST<VReg_512>;

} // End Defs = [M0, EXEC], UseNamedOperandTable = 1

multiclass SI_SPILL_SGPR <RegisterClass sgpr_class> {
  let UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC] in {
    def _SAVE : PseudoInstSI <
      (outs),
      (ins sgpr_class:$data, i32imm:$addr)> {
      let mayStore = 1;
      let mayLoad = 0;
    }

    def _RESTORE : PseudoInstSI <
      (outs sgpr_class:$data),
      (ins i32imm:$addr)> {
      let mayStore = 0;
      let mayLoad = 1;
    }
  } // End UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC]
}

// You cannot use M0 as the output of v_readlane_b32 instructions or
// use it in the sdata operand of SMEM instructions. We still need to
// be able to spill the physical register m0, so allow it for
// SI_SPILL_32_* instructions.
defm SI_SPILL_S32  : SI_SPILL_SGPR <SReg_32>;
defm SI_SPILL_S64  : SI_SPILL_SGPR <SReg_64>;
defm SI_SPILL_S128 : SI_SPILL_SGPR <SReg_128>;
defm SI_SPILL_S256 : SI_SPILL_SGPR <SReg_256>;
defm SI_SPILL_S512 : SI_SPILL_SGPR <SReg_512>;

multiclass SI_SPILL_VGPR <RegisterClass vgpr_class> {
  let UseNamedOperandTable = 1, VGPRSpill = 1,
      SchedRW = [WriteVMEM] in {
    def _SAVE : VPseudoInstSI <
      (outs),
      (ins vgpr_class:$vdata, i32imm:$vaddr, SReg_128:$srsrc,
           SReg_32:$soffset, i32imm:$offset)> {
      let mayStore = 1;
      let mayLoad = 0;
      // (2 * 4) + (8 * num_subregs) bytes maximum
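      // e.g. for VReg_128 (vgpr_class.Size = 128): ((128 >> 5) << 3) + 8 = 40 bytes.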
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }

    def _RESTORE : VPseudoInstSI <
      (outs vgpr_class:$vdata),
      (ins i32imm:$vaddr, SReg_128:$srsrc, SReg_32:$soffset,
           i32imm:$offset)> {
      let mayStore = 0;
      let mayLoad = 1;

      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }
  } // End UseNamedOperandTable = 1, VGPRSpill = 1, SchedRW = [WriteVMEM]
}

defm SI_SPILL_V32  : SI_SPILL_VGPR <VGPR_32>;
defm SI_SPILL_V64  : SI_SPILL_VGPR <VReg_64>;
defm SI_SPILL_V96  : SI_SPILL_VGPR <VReg_96>;
defm SI_SPILL_V128 : SI_SPILL_VGPR <VReg_128>;
defm SI_SPILL_V256 : SI_SPILL_VGPR <VReg_256>;
defm SI_SPILL_V512 : SI_SPILL_VGPR <VReg_512>;

def SI_PC_ADD_REL_OFFSET : SPseudoInstSI <
  (outs SReg_64:$dst),
  (ins si_ga:$ptr_lo, si_ga:$ptr_hi),
  [(set SReg_64:$dst,
      (i64 (SIpc_add_rel_offset (tglobaladdr:$ptr_lo), (tglobaladdr:$ptr_hi))))]> {
  let Defs = [SCC];
}

} // End SubtargetPredicate = isGCN

let Predicates = [isGCN] in {
def : Pat<
  (trap),
  (S_TRAP_PSEUDO TRAPTYPE.LLVM_TRAP)
>;

def : Pat<
  (debugtrap),
  (S_TRAP_PSEUDO TRAPTYPE.LLVM_DEBUG_TRAP)
>;

def : Pat<
  (int_amdgcn_else i64:$src, bb:$target),
  (SI_ELSE $src, $target, 0)
>;

def : Pat <
  (int_AMDGPU_kilp),
  (SI_KILL (i32 0xbf800000))
>;

//===----------------------------------------------------------------------===//
// VOP1 Patterns
//===----------------------------------------------------------------------===//

let Predicates = [UnsafeFPMath] in {

//def : RcpPat<V_RCP_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F32_e32, f32>;

def : RsqPat<V_RSQ_F32_e32, f32>;
def : RsqPat<V_RSQ_F64_e32, f64>;

// Convert (x - floor(x)) to fract(x)
def : Pat <
  (f32 (fsub (f32 (VOP3Mods f32:$x, i32:$mods)),
             (f32 (ffloor (f32 (VOP3Mods f32:$x, i32:$mods)))))),
  (V_FRACT_F32_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

// Convert (x + (-floor(x))) to fract(x)
def : Pat <
  (f64 (fadd (f64 (VOP3Mods f64:$x, i32:$mods)),
             (f64 (fneg (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))))))),
  (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

} // End Predicates = [UnsafeFPMath]


// f16_to_fp patterns
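// The and/or/xor forms below exist because clearing or flipping bit 15 of the
// i32 that holds the f16 value folds into the abs/neg source modifiers of the
// conversion instead of being emitted as a separate bit operation.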
def : Pat <
  (f32 (f16_to_fp i32:$src0)),
  (V_CVT_F32_F16_e64 SRCMODS.NONE, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f32 (f16_to_fp (and_oneuse i32:$src0, 0x7fff))),
  (V_CVT_F32_F16_e64 SRCMODS.ABS, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f32 (f16_to_fp (or_oneuse i32:$src0, 0x8000))),
  (V_CVT_F32_F16_e64 SRCMODS.NEG_ABS, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f32 (f16_to_fp (xor_oneuse i32:$src0, 0x8000))),
  (V_CVT_F32_F16_e64 SRCMODS.NEG, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

def : Pat <
  (f64 (fpextend f16:$src)),
  (V_CVT_F64_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

// fp_to_fp16 patterns
def : Pat <
  (i32 (fp_to_f16 (f32 (VOP3Mods0 f32:$src0, i32:$src0_modifiers, i1:$clamp, i32:$omod)))),
  (V_CVT_F16_F32_e64 $src0_modifiers, f32:$src0, $clamp, $omod)
>;

def : Pat <
  (i32 (fp_to_sint f16:$src)),
  (V_CVT_I32_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (i32 (fp_to_uint f16:$src)),
  (V_CVT_U32_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (f16 (sint_to_fp i32:$src)),
  (V_CVT_F16_F32_e32 (V_CVT_F32_I32_e32 $src))
>;

def : Pat <
  (f16 (uint_to_fp i32:$src)),
  (V_CVT_F16_F32_e32 (V_CVT_F32_U32_e32 $src))
>;

//===----------------------------------------------------------------------===//
// VOP2 Patterns
//===----------------------------------------------------------------------===//

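// fmad is selected to the multiply-accumulate v_mac instructions; the
// VOP3NoMods* fragments restrict the match to operands without source
// modifiers, so the VOP2-encodable mac form can be used.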
multiclass FMADPat <ValueType vt, Instruction inst> {
  def : Pat <
    (vt (fmad (VOP3NoMods0 vt:$src0, i32:$src0_modifiers, i1:$clamp, i32:$omod),
              (VOP3NoMods vt:$src1, i32:$src1_modifiers),
              (VOP3NoMods vt:$src2, i32:$src2_modifiers))),
    (inst $src0_modifiers, $src0, $src1_modifiers, $src1,
          $src2_modifiers, $src2, $clamp, $omod)
  >;
}

defm : FMADPat <f16, V_MAC_F16_e64>;
defm : FMADPat <f32, V_MAC_F32_e64>;

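// v_cndmask_b32 returns src1 when the condition bit is set and src0 otherwise,
// so the select's "false" value ($src2) is passed as the first source operand.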
multiclass SelectPat <ValueType vt, Instruction inst> {
  def : Pat <
    (vt (select i1:$src0, vt:$src1, vt:$src2)),
    (inst $src2, $src1, $src0)
  >;
}

defm : SelectPat <i16, V_CNDMASK_B32_e64>;
defm : SelectPat <i32, V_CNDMASK_B32_e64>;
defm : SelectPat <f16, V_CNDMASK_B32_e64>;
defm : SelectPat <f32, V_CNDMASK_B32_e64>;

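// v_bcnt_u32_b32 computes popcount(src0) + src1, so the add in this pattern
// folds into the instruction.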
def : Pat <
  (i32 (add (i32 (ctpop i32:$popcnt)), i32:$val)),
  (V_BCNT_U32_B32_e64 $popcnt, $val)
>;

/********** ============================================ **********/
/********** Extraction, Insertion, Building and Casting  **********/
/********** ============================================ **********/

foreach Index = 0-2 in {
  def Extract_Element_v2i32_#Index : Extract_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2i32_#Index : Insert_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v2f32_#Index : Extract_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2f32_#Index : Insert_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-3 in {
  def Extract_Element_v4i32_#Index : Extract_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4i32_#Index : Insert_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v4f32_#Index : Extract_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4f32_#Index : Insert_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-7 in {
  def Extract_Element_v8i32_#Index : Extract_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8i32_#Index : Insert_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v8f32_#Index : Extract_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8f32_#Index : Insert_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-15 in {
  def Extract_Element_v16i32_#Index : Extract_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16i32_#Index : Insert_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v16f32_#Index : Extract_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16f32_#Index : Insert_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

// FIXME: Why do only some of these type combinations for SReg and
// VReg?
// 16-bit bitcast
def : BitConvert <i16, f16, VGPR_32>;
def : BitConvert <f16, i16, VGPR_32>;
def : BitConvert <i16, f16, SReg_32>;
def : BitConvert <f16, i16, SReg_32>;

// 32-bit bitcast
def : BitConvert <i32, f32, VGPR_32>;
def : BitConvert <f32, i32, VGPR_32>;
def : BitConvert <i32, f32, SReg_32>;
def : BitConvert <f32, i32, SReg_32>;

// 64-bit bitcast
def : BitConvert <i64, f64, VReg_64>;
def : BitConvert <f64, i64, VReg_64>;
def : BitConvert <v2i32, v2f32, VReg_64>;
def : BitConvert <v2f32, v2i32, VReg_64>;
def : BitConvert <i64, v2i32, VReg_64>;
def : BitConvert <v2i32, i64, VReg_64>;
def : BitConvert <i64, v2f32, VReg_64>;
def : BitConvert <v2f32, i64, VReg_64>;
def : BitConvert <f64, v2f32, VReg_64>;
def : BitConvert <v2f32, f64, VReg_64>;
def : BitConvert <f64, v2i32, VReg_64>;
def : BitConvert <v2i32, f64, VReg_64>;
def : BitConvert <v4i32, v4f32, VReg_128>;
def : BitConvert <v4f32, v4i32, VReg_128>;

// 128-bit bitcast
def : BitConvert <v2i64, v4i32, SReg_128>;
def : BitConvert <v4i32, v2i64, SReg_128>;
def : BitConvert <v2f64, v4f32, VReg_128>;
def : BitConvert <v2f64, v4i32, VReg_128>;
def : BitConvert <v4f32, v2f64, VReg_128>;
def : BitConvert <v4i32, v2f64, VReg_128>;
def : BitConvert <v2i64, v2f64, VReg_128>;
def : BitConvert <v2f64, v2i64, VReg_128>;

// 256-bit bitcast
def : BitConvert <v8i32, v8f32, SReg_256>;
def : BitConvert <v8f32, v8i32, SReg_256>;
def : BitConvert <v8i32, v8f32, VReg_256>;
def : BitConvert <v8f32, v8i32, VReg_256>;

// 512-bit bitcast
def : BitConvert <v16i32, v16f32, VReg_512>;
def : BitConvert <v16f32, v16i32, VReg_512>;

/********** =================== **********/
/********** Src & Dst modifiers **********/
/********** =================== **********/

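// A clamp to [0.0, 1.0] is selected as v_add_f32 $src0, 0 with the VOP3 clamp
// bit set (the "1" operand below).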
def : Pat <
  (AMDGPUclamp (VOP3Mods0Clamp f32:$src0, i32:$src0_modifiers, i32:$omod),
               (f32 FP_ZERO), (f32 FP_ONE)),
  (V_ADD_F32_e64 $src0_modifiers, $src0, 0, (i32 0), 1, $omod)
>;

/********** ================================ **********/
/********** Floating point absolute/negative **********/
/********** ================================ **********/

// Prevent expanding both fneg and fabs.

def : Pat <
  (fneg (fabs f32:$src)),
  (S_OR_B32 $src, (S_MOV_B32 (i32 0x80000000))) // Set sign bit
>;

// FIXME: Should use S_OR_B32
def : Pat <
  (fneg (fabs f64:$src)),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_OR_B32_e32 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                  (V_MOV_B32_e32 (i32 0x80000000))), // Set sign bit.
    sub1)
>;

def : Pat <
  (fabs f32:$src),
  (V_AND_B32_e64 $src, (V_MOV_B32_e32 (i32 0x7fffffff)))
>;

def : Pat <
  (fneg f32:$src),
  (V_XOR_B32_e32 $src, (V_MOV_B32_e32 (i32 0x80000000)))
>;

def : Pat <
  (fabs f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_AND_B32_e64 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                   (V_MOV_B32_e32 (i32 0x7fffffff))), // Clear sign bit.
    sub1)
>;

def : Pat <
  (fneg f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_XOR_B32_e32 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                   (i32 (V_MOV_B32_e32 (i32 0x80000000)))),
    sub1)
>;

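// fcopysign is expanded with v_bfi_b32 (bitfield insert): given a mask of the
// magnitude bits, the result takes its magnitude from $src0 and its sign from
// $src1. The shifts move the f16 sign bit into or out of bit 15 as needed.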
def : Pat <
  (fcopysign f16:$src0, f16:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0, $src1)
>;

def : Pat <
  (fcopysign f32:$src0, f16:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x7fffffff)), $src0,
             (V_LSHLREV_B32_e64 (i32 16), $src1))
>;

def : Pat <
  (fcopysign f64:$src0, f16:$src1),
  (REG_SEQUENCE SReg_64,
    (i32 (EXTRACT_SUBREG $src0, sub0)), sub0,
    (V_BFI_B32 (S_MOV_B32 (i32 0x7fffffff)), (i32 (EXTRACT_SUBREG $src0, sub1)),
               (V_LSHLREV_B32_e64 (i32 16), $src1)), sub1)
>;

def : Pat <
  (fcopysign f16:$src0, f32:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0,
             (V_LSHRREV_B32_e64 (i32 16), $src1))
>;

def : Pat <
  (fcopysign f16:$src0, f64:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0,
             (V_LSHRREV_B32_e64 (i32 16), (EXTRACT_SUBREG $src1, sub1)))
>;

def : Pat <
  (fneg f16:$src),
  (V_XOR_B32_e32 $src, (V_MOV_B32_e32 (i32 0x00008000)))
>;

def : Pat <
  (fabs f16:$src),
  (V_AND_B32_e64 $src, (V_MOV_B32_e32 (i32 0x00007fff)))
>;

def : Pat <
  (fneg (fabs f16:$src)),
  (S_OR_B32 $src, (S_MOV_B32 (i32 0x00008000))) // Set sign bit
>;

/********** ================== **********/
/********** Immediate Patterns **********/
/********** ================== **********/

def : Pat <
  (VGPRImm<(i32 imm)>:$imm),
  (V_MOV_B32_e32 imm:$imm)
>;

def : Pat <
  (VGPRImm<(f32 fpimm)>:$imm),
  (V_MOV_B32_e32 (f32 (bitcast_fpimm_to_i32 $imm)))
>;

def : Pat <
  (i32 imm:$imm),
  (S_MOV_B32 imm:$imm)
>;

// FIXME: Workaround for ordering issue with peephole optimizer where
// a register class copy interferes with immediate folding. Should
// use s_mov_b32, which can be shrunk to s_movk_i32
def : Pat <
  (VGPRImm<(f16 fpimm)>:$imm),
  (V_MOV_B32_e32 (f16 (bitcast_fpimm_to_i32 $imm)))
>;

def : Pat <
  (f32 fpimm:$imm),
  (S_MOV_B32 (f32 (bitcast_fpimm_to_i32 $imm)))
>;

def : Pat <
  (f16 fpimm:$imm),
  (S_MOV_B32 (i32 (bitcast_fpimm_to_i32 $imm)))
>;

def : Pat <
  (i32 frameindex:$fi),
  (V_MOV_B32_e32 (i32 (frameindex_to_targetframeindex $fi)))
>;

def : Pat <
  (i64 InlineImm<i64>:$imm),
  (S_MOV_B64 InlineImm<i64>:$imm)
>;

// XXX - Should this use a s_cmp to set SCC?

// Set to sign-extended 64-bit value (true = -1, false = 0)
def : Pat <
  (i1 imm:$imm),
  (S_MOV_B64 (i64 (as_i64imm $imm)))
>;

def : Pat <
  (f64 InlineFPImm<f64>:$imm),
  (S_MOV_B64 (f64 (bitcast_fpimm_to_i64 InlineFPImm<f64>:$imm)))
>;

/********** ================== **********/
/********** Intrinsic Patterns **********/
/********** ================== **********/

def : POW_Common <V_LOG_F32_e32, V_EXP_F32_e32, V_MUL_LEGACY_F32_e32>;

def : Pat <
  (int_AMDGPU_cube v4f32:$src),
  (REG_SEQUENCE VReg_128,
    (V_CUBETC_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)),
                  0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)),
                  0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)),
                  0 /* clamp */, 0 /* omod */), sub0,
    (V_CUBESC_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)),
                  0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)),
                  0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)),
                  0 /* clamp */, 0 /* omod */), sub1,
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 831 | (V_CUBEMA_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)),
| 832 | 0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)),
| 833 | 0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)),
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 834 | 0 /* clamp */, 0 /* omod */), sub2,
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 835 | (V_CUBEID_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)),
| 836 | 0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)),
| 837 | 0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)),
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 838 | 0 /* clamp */, 0 /* omod */), sub3) |
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 839 | >; |
| 840 | |
| Michel Danzer | 0cc991e | 2013-02-22 11:22:58 +0000 | [diff] [blame] | 841 | def : Pat < |
| Tom Stellard | 40b7f1f | 2013-05-02 15:30:12 +0000 | [diff] [blame] | 842 | (i32 (sext i1:$src0)), |
| 843 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src0) |
| Michel Danzer | 0cc991e | 2013-02-22 11:22:58 +0000 | [diff] [blame] | 844 | >; |
| 845 | |
| Tom Stellard | f16d38c | 2014-02-13 23:34:13 +0000 | [diff] [blame] | 846 | class Ext32Pat <SDNode ext> : Pat < |
| 847 | (i32 (ext i1:$src0)), |
| Michel Danzer | 5d26fdf | 2014-02-05 09:48:05 +0000 | [diff] [blame] | 848 | (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src0) |
| 849 | >; |
| 850 | |
| Tom Stellard | f16d38c | 2014-02-13 23:34:13 +0000 | [diff] [blame] | 851 | def : Ext32Pat <zext>; |
| 852 | def : Ext32Pat <anyext>; |
| 853 | |
| Michel Danzer | 8caa904 | 2013-04-10 17:17:56 +0000 | [diff] [blame] | 854 | // The multiplication scales from [0,1] to the unsigned integer range |
| 855 | def : Pat < |
| 856 | (AMDGPUurecip i32:$src0), |
| 857 | (V_CVT_U32_F32_e32 |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 858 | (V_MUL_F32_e32 (i32 CONST.FP_UINT_MAX_PLUS_1), |
| Michel Danzer | 8caa904 | 2013-04-10 17:17:56 +0000 | [diff] [blame] | 859 | (V_RCP_IFLAG_F32_e32 (V_CVT_F32_U32_e32 $src0)))) |
| 860 | >; |
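// CONST.FP_UINT_MAX_PLUS_1 is 0x4f800000 (2^32 as an f32), so this computes
// roughly (u32)(2^32 * (1.0f / (float)src0)), the scaled reciprocal used by
// the unsigned 32-bit division expansion.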
| 861 | |
| Tom Stellard | 0289ff4 | 2014-05-16 20:56:44 +0000 | [diff] [blame] | 862 | //===----------------------------------------------------------------------===// |
| 863 | // VOP3 Patterns |
| 864 | //===----------------------------------------------------------------------===// |
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 865 | |
| Matt Arsenault | eb26020 | 2014-05-22 18:00:15 +0000 | [diff] [blame] | 866 | def : IMad24Pat<V_MAD_I32_I24>; |
| 867 | def : UMad24Pat<V_MAD_U32_U24>; |
| 868 | |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 869 | defm : BFIPatterns <V_BFI_B32, S_MOV_B32, SReg_64>; |
| Tom Stellard | 0289ff4 | 2014-05-16 20:56:44 +0000 | [diff] [blame] | 870 | def : ROTRPattern <V_ALIGNBIT_B32>; |
| 871 | |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 872 | /********** ====================== **********/ |
| Simon Pilgrim | e995a808 | 2016-11-18 11:04:02 +0000 | [diff] [blame] | 873 | /********** Indirect addressing **********/ |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 874 | /********** ====================== **********/ |
| 875 | |
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 876 | multiclass SI_INDIRECT_Pattern <ValueType vt, ValueType eltvt, string VecSize> { |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 877 | // Extract with offset |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 878 | def : Pat< |
| Nicolai Haehnle | 7968c34 | 2016-07-12 08:12:16 +0000 | [diff] [blame] | 879 | (eltvt (extractelt vt:$src, (MOVRELOffset i32:$idx, (i32 imm:$offset)))), |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 880 | (!cast<Instruction>("SI_INDIRECT_SRC_"#VecSize) $src, $idx, imm:$offset) |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 881 | >; |
| 882 | |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 883 | // Insert with offset |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 884 | def : Pat< |
| Nicolai Haehnle | 7968c34 | 2016-07-12 08:12:16 +0000 | [diff] [blame] | 885 | (insertelt vt:$src, eltvt:$val, (MOVRELOffset i32:$idx, (i32 imm:$offset))), |
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 886 | (!cast<Instruction>("SI_INDIRECT_DST_"#VecSize) $src, $idx, imm:$offset, $val) |
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 887 | >; |
| 888 | } |
| 889 | |
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 890 | defm : SI_INDIRECT_Pattern <v2f32, f32, "V2">; |
| 891 | defm : SI_INDIRECT_Pattern <v4f32, f32, "V4">; |
| 892 | defm : SI_INDIRECT_Pattern <v8f32, f32, "V8">; |
| 893 | defm : SI_INDIRECT_Pattern <v16f32, f32, "V16">; |
| Matt Arsenault | f5958dd | 2014-02-02 00:05:35 +0000 | [diff] [blame] | 894 | |
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 895 | defm : SI_INDIRECT_Pattern <v2i32, i32, "V2">; |
| 896 | defm : SI_INDIRECT_Pattern <v4i32, i32, "V4">; |
| 897 | defm : SI_INDIRECT_Pattern <v8i32, i32, "V8">; |
| 898 | defm : SI_INDIRECT_Pattern <v16i32, i32, "V16">; |
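// For example, the "V4" instantiations above match extractelt/insertelt on
// v4f32/v4i32 with a variable index and select SI_INDIRECT_SRC_V4 /
// SI_INDIRECT_DST_V4, which are later expanded to v_movrels/v_movreld (or
// VGPR index mode) sequences.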
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 899 | |
| Tom Stellard | 81d871d | 2013-11-13 23:36:50 +0000 | [diff] [blame] | 900 | //===----------------------------------------------------------------------===// |
| Wei Ding | 1041a64 | 2016-08-24 14:59:47 +0000 | [diff] [blame] | 901 | // SAD Patterns |
| 902 | //===----------------------------------------------------------------------===// |
| 903 | |
| 904 | def : Pat < |
| 905 | (add (sub_oneuse (umax i32:$src0, i32:$src1), |
| 906 | (umin i32:$src0, i32:$src1)), |
| 907 | i32:$src2), |
| 908 | (V_SAD_U32 $src0, $src1, $src2) |
| 909 | >; |
| 910 | |
| 911 | def : Pat < |
| 912 | (add (select_oneuse (i1 (setugt i32:$src0, i32:$src1)), |
| 913 | (sub i32:$src0, i32:$src1), |
| 914 | (sub i32:$src1, i32:$src0)), |
| 915 | i32:$src2), |
| 916 | (V_SAD_U32 $src0, $src1, $src2) |
| 917 | >; |
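// Both forms above compute |src0 - src1| + src2, which is exactly
// v_sad_u32; e.g. with src0 = 3, src1 = 10, src2 = 5 the result is
// |3 - 10| + 5 = 12.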
| 918 | |
| 919 | //===----------------------------------------------------------------------===// |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 920 | // Conversion Patterns |
| 921 | //===----------------------------------------------------------------------===// |
| 922 | |
| 923 | def : Pat<(i32 (sext_inreg i32:$src, i1)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 924 | (S_BFE_I32 i32:$src, (i32 0x10000))>; // 0 | 1 << 16
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 925 | |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 926 | // Handle sext_inreg in i64 |
| 927 | def : Pat < |
| 928 | (i64 (sext_inreg i64:$src, i1)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 929 | (S_BFE_I64 i64:$src, (i32 0x10000)) // 0 | 1 << 16 |
| 930 | >; |
| 931 | |
| 932 | def : Pat < |
| Matt Arsenault | 682eb43 | 2017-01-11 22:35:22 +0000 | [diff] [blame] | 933 | (i16 (sext_inreg i16:$src, i1)), |
| 934 | (S_BFE_I32 $src, (i32 0x00010000)) // 0 | 1 << 16 |
| 935 | >; |
| 936 | |
| 937 | def : Pat < |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 938 | (i16 (sext_inreg i16:$src, i8)), |
| 939 | (S_BFE_I32 $src, (i32 0x80000)) // 0 | 8 << 16 |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 940 | >; |
| 941 | |
| 942 | def : Pat < |
| 943 | (i64 (sext_inreg i64:$src, i8)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 944 | (S_BFE_I64 i64:$src, (i32 0x80000)) // 0 | 8 << 16 |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 945 | >; |
| 946 | |
| 947 | def : Pat < |
| 948 | (i64 (sext_inreg i64:$src, i16)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 949 | (S_BFE_I64 i64:$src, (i32 0x100000)) // 0 | 16 << 16 |
| Matt Arsenault | 9481221 | 2014-11-14 18:18:16 +0000 | [diff] [blame] | 950 | >; |
| 951 | |
| 952 | def : Pat < |
| 953 | (i64 (sext_inreg i64:$src, i32)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 954 | (S_BFE_I64 i64:$src, (i32 0x200000)) // 0 | 32 << 16 |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 955 | >; |
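// In the S_BFE source-1 operand, the low bits hold the field offset and bits
// [22:16] hold the field width, hence the "offset | width << 16" comments;
// e.g. 0x100000 sign-extends a 16-bit field starting at bit 0.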
| 956 | |
| Matt Arsenault | c6b69a9 | 2016-07-26 23:06:33 +0000 | [diff] [blame] | 957 | def : Pat < |
| 958 | (i64 (zext i32:$src)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 959 | (REG_SEQUENCE SReg_64, $src, sub0, (S_MOV_B32 (i32 0)), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 960 | >; |
| 961 | |
| Matt Arsenault | c6b69a9 | 2016-07-26 23:06:33 +0000 | [diff] [blame] | 962 | def : Pat < |
| 963 | (i64 (anyext i32:$src)), |
| 964 | (REG_SEQUENCE SReg_64, $src, sub0, (i32 (IMPLICIT_DEF)), sub1) |
| 965 | >; |
| 966 | |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 967 | class ZExt_i64_i1_Pat <SDNode ext> : Pat < |
| 968 | (i64 (ext i1:$src)), |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 969 | (REG_SEQUENCE VReg_64, |
| 970 | (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src), sub0, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 971 | (S_MOV_B32 (i32 0)), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 972 | >; |
| 973 | |
| 974 | |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 975 | def : ZExt_i64_i1_Pat<zext>; |
| 976 | def : ZExt_i64_i1_Pat<anyext>; |
| 977 | |
| Tom Stellard | bc4497b | 2016-02-12 23:45:29 +0000 | [diff] [blame] | 978 | // FIXME: We need to use COPY_TO_REGCLASS to work-around the fact that |
| 979 | // REG_SEQUENCE patterns don't support instructions with multiple outputs. |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 980 | def : Pat < |
| 981 | (i64 (sext i32:$src)), |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 982 | (REG_SEQUENCE SReg_64, $src, sub0, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 983 | (i32 (COPY_TO_REGCLASS (S_ASHR_I32 $src, (i32 31)), SReg_32_XM0)), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 984 | >; |
| 985 | |
| 986 | def : Pat < |
| 987 | (i64 (sext i1:$src)), |
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 988 | (REG_SEQUENCE VReg_64, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 989 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src), sub0, |
| 990 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src), sub1) |
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 991 | >; |
| 992 | |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 993 | class FPToI1Pat<Instruction Inst, int KOne, ValueType kone_type, ValueType vt, SDPatternOperator fp_to_int> : Pat < |
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 994 | (i1 (fp_to_int (vt (VOP3Mods vt:$src0, i32:$src0_modifiers)))), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 995 | (i1 (Inst 0, (kone_type KOne), $src0_modifiers, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)) |
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 996 | >; |
| 997 | |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 998 | def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_ONE, i32, f32, fp_to_uint>; |
| 999 | def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_NEG_ONE, i32, f32, fp_to_sint>; |
| 1000 | def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_ONE, i64, f64, fp_to_uint>; |
| 1001 | def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_NEG_ONE, i64, f64, fp_to_sint>; |
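// Converting to i1 only has to distinguish 0 from 1 (or -1); any other value
// is out of range. So fp_to_uint/fp_to_sint to i1 are selected as an equality
// compare against 1.0 / -1.0, with CONST.FP32_ONE etc. providing the IEEE bit
// patterns of those constants as literal operands.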
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 1002 | |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1003 | // If we need to perform a logical operation on i1 values, we need to |
| 1004 | // use vector comparisons since there is only one SCC register. Vector |
| Simon Pilgrim | e995a808 | 2016-11-18 11:04:02 +0000 | [diff] [blame] | 1005 | // comparisons still write to a pair of SGPRs, so treat these as |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1006 | // 64-bit comparisons. When SGPR copies are legalized, any of these
| 1007 | // instructions that ends up reading a copy from SCC will be moved
| 1008 | // to the VALU.
| 1009 | def : Pat < |
| 1010 | (i1 (and i1:$src0, i1:$src1)), |
| 1011 | (S_AND_B64 $src0, $src1) |
| 1012 | >; |
| 1013 | |
| 1014 | def : Pat < |
| 1015 | (i1 (or i1:$src0, i1:$src1)), |
| 1016 | (S_OR_B64 $src0, $src1) |
| 1017 | >; |
| 1018 | |
| 1019 | def : Pat < |
| 1020 | (i1 (xor i1:$src0, i1:$src1)), |
| 1021 | (S_XOR_B64 $src0, $src1) |
| 1022 | >; |
| 1023 | |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1024 | def : Pat < |
| 1025 | (f32 (sint_to_fp i1:$src)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1026 | (V_CNDMASK_B32_e64 (i32 0), (i32 CONST.FP32_NEG_ONE), $src) |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1027 | >; |
| 1028 | |
| 1029 | def : Pat < |
| 1030 | (f32 (uint_to_fp i1:$src)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1031 | (V_CNDMASK_B32_e64 (i32 0), (i32 CONST.FP32_ONE), $src) |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1032 | >; |
| 1033 | |
| 1034 | def : Pat < |
| 1035 | (f64 (sint_to_fp i1:$src)), |
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1036 | (V_CVT_F64_I32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src)) |
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1037 | >; |
| 1038 | |
| 1039 | def : Pat < |
| 1040 | (f64 (uint_to_fp i1:$src)), |
| 1041 | (V_CVT_F64_U32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src)) |
| 1042 | >; |
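// The f64 versions select an i32 0 or +/-1 with V_CNDMASK first and then
// convert it, rather than selecting a 64-bit floating-point constant
// directly.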
| 1043 | |
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 1044 | //===----------------------------------------------------------------------===// |
| Tom Stellard | fb96169 | 2013-10-23 00:44:19 +0000 | [diff] [blame] | 1045 | // Miscellaneous Patterns |
| 1046 | //===----------------------------------------------------------------------===// |
| 1047 | |
| 1048 | def : Pat < |
| Tom Stellard | 81d871d | 2013-11-13 23:36:50 +0000 | [diff] [blame] | 1049 | (i32 (trunc i64:$a)), |
| 1050 | (EXTRACT_SUBREG $a, sub0) |
| 1051 | >; |
| 1052 | |
| Michel Danzer | bf1a641 | 2014-01-28 03:01:16 +0000 | [diff] [blame] | 1053 | def : Pat < |
| 1054 | (i1 (trunc i32:$a)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1055 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), $a), (i32 1)) |
| Michel Danzer | bf1a641 | 2014-01-28 03:01:16 +0000 | [diff] [blame] | 1056 | >; |
| 1057 | |
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1058 | def : Pat < |
| Matt Arsenault | abd271b | 2015-02-05 06:05:13 +0000 | [diff] [blame] | 1059 | (i1 (trunc i64:$a)), |
| Matt Arsenault | 5d8eb25 | 2016-09-30 01:50:20 +0000 | [diff] [blame] | 1060 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1061 | (i32 (EXTRACT_SUBREG $a, sub0))), (i32 1)) |
| Matt Arsenault | abd271b | 2015-02-05 06:05:13 +0000 | [diff] [blame] | 1062 | >; |
| 1063 | |
| 1064 | def : Pat < |
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1065 | (i32 (bswap i32:$a)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1066 | (V_BFI_B32 (S_MOV_B32 (i32 0x00ff00ff)), |
| 1067 | (V_ALIGNBIT_B32 $a, $a, (i32 24)), |
| 1068 | (V_ALIGNBIT_B32 $a, $a, (i32 8))) |
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1069 | >; |
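// V_ALIGNBIT_B32 $a, $a, n rotates $a right by n bits. The 0x00ff00ff mask
// takes bytes 2 and 0 from the rotate-by-24 and bytes 3 and 1 from the
// rotate-by-8, which together form the byte-swapped value.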
| 1070 | |
| Marek Olsak | 63a7b08 | 2015-03-24 13:40:21 +0000 | [diff] [blame] | 1071 | multiclass BFMPatterns <ValueType vt, InstSI BFM, InstSI MOV> { |
| 1072 | def : Pat < |
| 1073 | (vt (shl (vt (add (vt (shl 1, vt:$a)), -1)), vt:$b)), |
| 1074 | (BFM $a, $b) |
| 1075 | >; |
| 1076 | |
| 1077 | def : Pat < |
| 1078 | (vt (add (vt (shl 1, vt:$a)), -1)), |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1079 | (BFM $a, (MOV (i32 0))) |
| Marek Olsak | 63a7b08 | 2015-03-24 13:40:21 +0000 | [diff] [blame] | 1080 | >; |
| 1081 | } |
| 1082 | |
| 1083 | defm : BFMPatterns <i32, S_BFM_B32, S_MOV_B32>; |
| 1084 | // FIXME: defm : BFMPatterns <i64, S_BFM_B64, S_MOV_B64>; |
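// s_bfm_b32 $a, $b produces a mask of $a consecutive ones starting at bit $b,
// i.e. ((1 << $a) - 1) << $b, which is exactly the expression matched above;
// the second pattern is the offset-zero case.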
| 1085 | |
| Marek Olsak | 949f5da | 2015-03-24 13:40:34 +0000 | [diff] [blame] | 1086 | def : BFEPattern <V_BFE_U32, S_MOV_B32>; |
| 1087 | |
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1088 | def : Pat< |
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame] | 1089 | (fcanonicalize (f16 (VOP3Mods f16:$src, i32:$src_mods))), |
| 1090 | (V_MUL_F16_e64 0, (i32 CONST.FP16_ONE), $src_mods, $src, 0, 0) |
| Matt Arsenault | ce84130 | 2016-12-22 03:05:37 +0000 | [diff] [blame] | 1091 | >; |
| 1092 | |
| 1093 | def : Pat< |
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame] | 1094 | (fcanonicalize (f32 (VOP3Mods f32:$src, i32:$src_mods))), |
| 1095 | (V_MUL_F32_e64 0, (i32 CONST.FP32_ONE), $src_mods, $src, 0, 0) |
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1096 | >; |
| 1097 | |
| 1098 | def : Pat< |
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame] | 1099 | (fcanonicalize (f64 (VOP3Mods f64:$src, i32:$src_mods))), |
| 1100 | (V_MUL_F64 0, CONST.FP64_ONE, $src_mods, $src, 0, 0) |
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1101 | >; |
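// fcanonicalize is selected as a multiply by 1.0 with the source modifiers
// preserved; the multiply quiets signaling NaNs and flushes denormals
// according to the current FP mode, which is all canonicalization requires.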
| 1102 | |
| Matt Arsenault | 4165efd | 2017-01-17 07:26:53 +0000 | [diff] [blame] | 1103 | // Allow integer inputs |
| 1104 | class ExpPattern<SDPatternOperator node, ValueType vt, Instruction Inst> : Pat< |
| 1105 | (node (i8 timm:$tgt), (i8 timm:$en), vt:$src0, vt:$src1, vt:$src2, vt:$src3, (i1 timm:$compr), (i1 timm:$vm)), |
| 1106 | (Inst i8:$tgt, vt:$src0, vt:$src1, vt:$src2, vt:$src3, i1:$vm, i1:$compr, i8:$en) |
| 1107 | >; |
| 1108 | |
| 1109 | def : ExpPattern<AMDGPUexport, i32, EXP>; |
| 1110 | def : ExpPattern<AMDGPUexport_done, i32, EXP_DONE>; |
| 1111 | |
| Marek Olsak | 43650e4 | 2015-03-24 13:40:08 +0000 | [diff] [blame] | 1112 | //===----------------------------------------------------------------------===// |
| 1113 | // Fract Patterns |
| 1114 | //===----------------------------------------------------------------------===// |
| 1115 | |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1116 | let Predicates = [isSI] in { |
| 1117 | |
| 1118 | // V_FRACT is buggy on SI, so the F32 version is never used and (x-floor(x)) is |
| 1119 | // used instead. However, SI doesn't have V_FLOOR_F64, so the most efficient |
| 1120 | // way to implement it is using V_FRACT_F64. |
| 1121 | // The workaround for the V_FRACT bug is: |
| 1122 | // fract(x) = isnan(x) ? x : min(V_FRACT(x), 0.99999999999999999) |
| 1123 | |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1124 | // Convert floor(x) to (x - fract(x)) |
| 1125 | def : Pat < |
| 1126 | (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))), |
| 1127 | (V_ADD_F64 |
| 1128 | $mods, |
| 1129 | $x, |
| 1130 | SRCMODS.NEG, |
| 1131 | (V_CNDMASK_B64_PSEUDO |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1132 | (V_MIN_F64 |
| 1133 | SRCMODS.NONE, |
| 1134 | (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE), |
| 1135 | SRCMODS.NONE, |
| 1136 | (V_MOV_B64_PSEUDO 0x3fefffffffffffff), |
| 1137 | DSTCLAMP.NONE, DSTOMOD.NONE), |
| Marek Olsak | 1354b87 | 2015-07-27 11:37:42 +0000 | [diff] [blame] | 1138 | $x, |
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1139 | (V_CMP_CLASS_F64_e64 SRCMODS.NONE, $x, (i32 3 /*NaN*/))), |
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1140 | DSTCLAMP.NONE, DSTOMOD.NONE) |
| 1141 | >; |
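// In the pattern above, the V_MIN clamps the (buggy) V_FRACT result to
// 0x3fefffffffffffff, the largest f64 below 1.0, and the V_CNDMASK with
// V_CMP_CLASS (class mask 3 = signaling or quiet NaN) passes NaN inputs
// through unchanged, implementing the workaround formula above.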
| 1142 | |
| 1143 | } // End Predicates = [isSI] |
| 1144 | |
| Tom Stellard | fb96169 | 2013-10-23 00:44:19 +0000 | [diff] [blame] | 1145 | //============================================================================// |
| Tom Stellard | eac65dd | 2013-05-03 17:21:20 +0000 | [diff] [blame] | 1146 | // Miscellaneous Optimization Patterns |
| 1147 | //============================================================================// |
| 1148 | |
| Matt Arsenault | 49dd428 | 2014-09-15 17:15:02 +0000 | [diff] [blame] | 1149 | def : SHA256MaPattern <V_BFI_B32, V_XOR_B32_e64>; |
| Tom Stellard | eac65dd | 2013-05-03 17:21:20 +0000 | [diff] [blame] | 1150 | |
| Matt Arsenault | c89f291 | 2016-03-07 21:54:48 +0000 | [diff] [blame] | 1151 | def : IntMed3Pat<V_MED3_I32, smax, smax_oneuse, smin_oneuse>; |
| 1152 | def : IntMed3Pat<V_MED3_U32, umax, umax_oneuse, umin_oneuse>; |
| 1153 | |
| Matt Arsenault | f84e5d9 | 2017-01-31 03:07:46 +0000 | [diff] [blame] | 1154 | // This matches 16 permutations of |
| 1155 | // max(min(x, y), min(max(x, y), z)) |
| 1156 | class FPMed3Pat<ValueType vt, |
| 1157 | Instruction med3Inst> : Pat< |
| 1158 | (fmaxnum (fminnum_oneuse (VOP3Mods_nnan vt:$src0, i32:$src0_mods), |
| 1159 | (VOP3Mods_nnan vt:$src1, i32:$src1_mods)), |
| 1160 | (fminnum_oneuse (fmaxnum_oneuse (VOP3Mods_nnan vt:$src0, i32:$src0_mods), |
| 1161 | (VOP3Mods_nnan vt:$src1, i32:$src1_mods)), |
| 1162 | (vt (VOP3Mods_nnan vt:$src2, i32:$src2_mods)))), |
| 1163 | (med3Inst $src0_mods, $src0, $src1_mods, $src1, $src2_mods, $src2, DSTCLAMP.NONE, DSTOMOD.NONE) |
| 1164 | >; |
| 1165 | |
| 1166 | def : FPMed3Pat<f32, V_MED3_F32>; |
| 1167 | |
| Matt Arsenault | af63524 | 2017-01-30 19:30:24 +0000 | [diff] [blame] | 1168 | |
| 1169 | // Undo sub x, c -> add x, -c canonicalization since c is more likely |
| 1170 | // an inline immediate than -c. |
| 1171 | // TODO: Also do for 64-bit. |
| 1172 | def : Pat< |
| 1173 | (add i32:$src0, (i32 NegSubInlineConst32:$src1)), |
| 1174 | (S_SUB_I32 $src0, NegSubInlineConst32:$src1) |
| 1175 | >; |
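// For example, "sub x, 64" is canonicalized to "add x, -64"; -64 is not an
// inline constant but 64 is, so re-selecting s_sub_i32 x, 64 avoids a
// literal.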
| 1176 | |
| Tom Stellard | 245c15f | 2015-05-26 15:55:52 +0000 | [diff] [blame] | 1177 | //============================================================================// |
| 1178 | // Assembler aliases |
| 1179 | //============================================================================// |
| 1180 | |
| 1181 | def : MnemonicAlias<"v_add_u32", "v_add_i32">; |
| 1182 | def : MnemonicAlias<"v_sub_u32", "v_sub_i32">; |
| 1183 | def : MnemonicAlias<"v_subrev_u32", "v_subrev_i32">; |
| 1184 | |
| Marek Olsak | 5df00d6 | 2014-12-07 12:18:57 +0000 | [diff] [blame] | 1185 | } // End isGCN predicate |