//===-- SIInstructions.td - SI Instruction Definitions --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// This file was originally auto-generated from a GPU register header file and
// all the instruction definitions were originally commented out.  Instructions
// that are not yet supported remain commented out.
//===----------------------------------------------------------------------===//

def isGCN : Predicate<"Subtarget->getGeneration() "
                      ">= SISubtarget::SOUTHERN_ISLANDS">,
            AssemblerPredicate<"FeatureGCN">;
def isSI : Predicate<"Subtarget->getGeneration() "
                     "== SISubtarget::SOUTHERN_ISLANDS">,
           AssemblerPredicate<"FeatureSouthernIslands">;

def has16BankLDS : Predicate<"Subtarget->getLDSBankCount() == 16">;
def has32BankLDS : Predicate<"Subtarget->getLDSBankCount() == 32">;
def HasVGPRIndexMode : Predicate<"Subtarget->hasVGPRIndexMode()">,
                       AssemblerPredicate<"FeatureVGPRIndexMode">;
def HasMovrel : Predicate<"Subtarget->hasMovrel()">,
                AssemblerPredicate<"FeatureMovrel">;

include "VOPInstructions.td"
include "SOPInstructions.td"
include "SMInstructions.td"
include "FLATInstructions.td"
include "BUFInstructions.td"

let SubtargetPredicate = isGCN in {

//===----------------------------------------------------------------------===//
// EXP Instructions
//===----------------------------------------------------------------------===//

defm EXP : EXP_m<0, AMDGPUexport>;
defm EXP_DONE : EXP_m<1, AMDGPUexport_done>;

//===----------------------------------------------------------------------===//
// VINTRP Instructions
//===----------------------------------------------------------------------===//

let Uses = [M0, EXEC] in {

// FIXME: Specify SchedRW for VINTRP instructions.

multiclass V_INTERP_P1_F32_m : VINTRP_m <
  0x00000000,
  (outs VGPR_32:$vdst),
  (ins VGPR_32:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_p1_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_p1 f32:$vsrc, (i32 imm:$attrchan),
                                    (i32 imm:$attr)))]
>;

let OtherPredicates = [has32BankLDS] in {

defm V_INTERP_P1_F32 : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has32BankLDS]

let OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $vdst", isAsmParserOnly=1 in {

defm V_INTERP_P1_F32_16bank : V_INTERP_P1_F32_m;

} // End OtherPredicates = [has16BankLDS], Constraints = "@earlyclobber $vdst", isAsmParserOnly=1

let DisableEncoding = "$src0", Constraints = "$src0 = $vdst" in {

defm V_INTERP_P2_F32 : VINTRP_m <
  0x00000001,
  (outs VGPR_32:$vdst),
  (ins VGPR_32:$src0, VGPR_32:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_p2_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_p2 f32:$src0, f32:$vsrc, (i32 imm:$attrchan),
                                    (i32 imm:$attr)))]>;

} // End DisableEncoding = "$src0", Constraints = "$src0 = $vdst"

defm V_INTERP_MOV_F32 : VINTRP_m <
  0x00000002,
  (outs VGPR_32:$vdst),
  (ins InterpSlot:$vsrc, Attr:$attr, AttrChan:$attrchan),
  "v_interp_mov_f32 $vdst, $vsrc, $attr$attrchan",
  [(set f32:$vdst, (AMDGPUinterp_mov (i32 imm:$vsrc), (i32 imm:$attrchan),
                                     (i32 imm:$attr)))]>;
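
// Rough semantics of the interpolation ops above (a sketch, not normative):
//   v_interp_p1_f32  vdst = p10 * vsrc + p0     (vsrc is typically the i coordinate)
//   v_interp_p2_f32  vdst = p20 * vsrc + vdst   (vsrc is typically the j coordinate)
//   v_interp_mov_f32 vdst = one of p0/p10/p20, selected by the InterpSlot operand
// The per-attribute p0/p10/p20 values live in LDS and M0 supplies the
// parameter base, which is why all three list M0 in Uses.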

} // End Uses = [M0, EXEC]

//===----------------------------------------------------------------------===//
// Pseudo Instructions
//===----------------------------------------------------------------------===//

let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC] in {

// For use in patterns
def V_CNDMASK_B64_PSEUDO : VOP3Common <(outs VReg_64:$vdst),
  (ins VSrc_b64:$src0, VSrc_b64:$src1, SSrc_b64:$src2), "", []> {
  let isPseudo = 1;
  let isCodeGenOnly = 1;
  let usesCustomInserter = 1;
}

// 64-bit vector move instruction.  This is mainly used by the SIFoldOperands
// pass to enable folding of inline immediates.
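// After register allocation this pseudo is normally expanded into two 32-bit
// moves, one per half, e.g. roughly:
//   v_mov_b32_e32 vdst.sub0, lo32(src)
//   v_mov_b32_e32 vdst.sub1, hi32(src)
// Keeping a single 64-bit def until then gives SIFoldOperands a simple place
// to fold a 64-bit inline immediate.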
def V_MOV_B64_PSEUDO : VPseudoInstSI <(outs VReg_64:$vdst),
                                      (ins VSrc_b64:$src0)>;
} // End let hasSideEffects = 0, mayLoad = 0, mayStore = 0, Uses = [EXEC]

def S_TRAP_PSEUDO : VPseudoInstSI <(outs), (ins),
  [(trap)]> {
  let hasSideEffects = 1;
  let SALU = 1;
  let usesCustomInserter = 1;
}

let usesCustomInserter = 1, SALU = 1 in {
def GET_GROUPSTATICSIZE : PseudoInstSI <(outs SReg_32:$sdst), (ins),
  [(set SReg_32:$sdst, (int_amdgcn_groupstaticsize))]>;
} // End let usesCustomInserter = 1, SALU = 1

def S_MOV_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_XOR_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def S_ANDN2_B64_term : PseudoInstSI<(outs SReg_64:$dst),
  (ins SSrc_b64:$src0, SSrc_b64:$src1)> {
  let SALU = 1;
  let isAsCheapAsAMove = 1;
  let isTerminator = 1;
}

def WAVE_BARRIER : SPseudoInstSI<(outs), (ins),
  [(int_amdgcn_wave_barrier)]> {
  let SchedRW = [];
  let hasNoSchedulingInfo = 1;
  let hasSideEffects = 1;
  let mayLoad = 1;
  let mayStore = 1;
  let isBarrier = 1;
  let isConvergent = 1;
}

// SI pseudo instructions. These are used by the CFG structurizer pass
// and should be lowered to ISA instructions prior to codegen.
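// As a rough sketch (the exact expansion lives in SILowerControlFlow), the
// structured control-flow pseudos below become exec-mask manipulation, e.g.:
//   SI_IF     -> s_and_saveexec_b64 + s_xor_b64 + s_cbranch_execz
//   SI_ELSE   -> save/invert the saved mask and branch over the else block
//   SI_END_CF -> s_or_b64 exec, exec, <saved mask>
// The Size values chosen below budget for these multi-instruction expansions.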

// Dummy terminator instruction to use after control flow instructions are
// replaced with exec mask operations.
def SI_MASK_BRANCH : PseudoInstSI <
  (outs), (ins brtarget:$target)> {
  let isBranch = 0;
  let isTerminator = 1;
  let isBarrier = 0;
  let Uses = [EXEC];
  let SchedRW = [];
  let hasNoSchedulingInfo = 1;
}

let isTerminator = 1 in {

def SI_IF : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, brtarget:$target),
  [(set i64:$dst, (int_amdgcn_if i1:$vcc, bb:$target))], 1, 1> {
  let Constraints = "";
  let Size = 12;
  let mayLoad = 1;
  let mayStore = 1;
  let hasSideEffects = 1;
}

def SI_ELSE : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src, brtarget:$target, i1imm:$execfix), [], 1, 1> {
  let Constraints = "$src = $dst";
  let Size = 12;
  let mayStore = 1;
  let mayLoad = 1;
  let hasSideEffects = 1;
}

def SI_LOOP : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved, brtarget:$target),
  [(int_amdgcn_loop i64:$saved, bb:$target)], 1, 1> {
  let Size = 8;
  let isBranch = 1;
  let hasSideEffects = 1;
  let mayLoad = 1;
  let mayStore = 1;
}

} // End isTerminator = 1

def SI_END_CF : CFPseudoInstSI <
  (outs), (ins SReg_64:$saved),
  [(int_amdgcn_end_cf i64:$saved)], 1, 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
  let mayLoad = 1;
  let mayStore = 1;
  let hasSideEffects = 1;
}

def SI_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_break i64:$src))], 1> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_IF_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$vcc, SReg_64:$src),
  [(set i64:$dst, (int_amdgcn_if_break i1:$vcc, i64:$src))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_ELSE_BREAK : CFPseudoInstSI <
  (outs SReg_64:$dst), (ins SReg_64:$src0, SReg_64:$src1),
  [(set i64:$dst, (int_amdgcn_else_break i64:$src0, i64:$src1))]> {
  let Size = 4;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

let Uses = [EXEC], Defs = [EXEC,VCC] in {
def SI_KILL : PseudoInstSI <
  (outs), (ins VSrc_b32:$src),
  [(AMDGPUkill i32:$src)]> {
  let isConvergent = 1;
  let usesCustomInserter = 1;
}

def SI_KILL_TERMINATOR : SPseudoInstSI <
  (outs), (ins VSrc_b32:$src)> {
  let isTerminator = 1;
}

} // End Uses = [EXEC], Defs = [EXEC,VCC]

// Branch on undef scc. Used to avoid intermediate copy from
// IMPLICIT_DEF to SCC.
def SI_BR_UNDEF : SPseudoInstSI <(outs), (ins sopp_brtarget:$simm16)> {
  let isTerminator = 1;
  let usesCustomInserter = 1;
}

def SI_PS_LIVE : PseudoInstSI <
  (outs SReg_64:$dst), (ins),
  [(set i1:$dst, (int_amdgcn_ps_live))]> {
  let SALU = 1;
}

// Used as an isel pseudo to directly emit initialization with an
// s_mov_b32 rather than a copy of another initialized
// register. MachineCSE skips copies, and we don't want to have to
// fold operands before it runs.
def SI_INIT_M0 : SPseudoInstSI <(outs), (ins SSrc_b32:$src)> {
  let Defs = [M0];
  let usesCustomInserter = 1;
  let isAsCheapAsAMove = 1;
  let isReMaterializable = 1;
}

def SI_RETURN : SPseudoInstSI <
  (outs), (ins variable_ops), [(AMDGPUreturn)]> {
  let isTerminator = 1;
  let isBarrier = 1;
  let isReturn = 1;
  let hasSideEffects = 1;
  let hasNoSchedulingInfo = 1;
  let DisableWQM = 1;
}

let Defs = [M0, EXEC],
    UseNamedOperandTable = 1 in {

class SI_INDIRECT_SRC<RegisterClass rc> : VPseudoInstSI <
  (outs VGPR_32:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset)> {
  let usesCustomInserter = 1;
}

class SI_INDIRECT_DST<RegisterClass rc> : VPseudoInstSI <
  (outs rc:$vdst),
  (ins rc:$src, VS_32:$idx, i32imm:$offset, VGPR_32:$val)> {
  let Constraints = "$src = $vdst";
  let usesCustomInserter = 1;
}

// TODO: We can support indirect SGPR access.
def SI_INDIRECT_SRC_V1 : SI_INDIRECT_SRC<VGPR_32>;
def SI_INDIRECT_SRC_V2 : SI_INDIRECT_SRC<VReg_64>;
def SI_INDIRECT_SRC_V4 : SI_INDIRECT_SRC<VReg_128>;
def SI_INDIRECT_SRC_V8 : SI_INDIRECT_SRC<VReg_256>;
def SI_INDIRECT_SRC_V16 : SI_INDIRECT_SRC<VReg_512>;

def SI_INDIRECT_DST_V1 : SI_INDIRECT_DST<VGPR_32>;
def SI_INDIRECT_DST_V2 : SI_INDIRECT_DST<VReg_64>;
def SI_INDIRECT_DST_V4 : SI_INDIRECT_DST<VReg_128>;
def SI_INDIRECT_DST_V8 : SI_INDIRECT_DST<VReg_256>;
def SI_INDIRECT_DST_V16 : SI_INDIRECT_DST<VReg_512>;
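
// Sketch of how these are expanded (see the custom inserter in
// SIISelLowering): the dynamic $idx is placed in M0 (or driven through
// s_set_gpr_idx_on where VGPRIndexMode is available) and the element is then
// read or written with v_movrels_b32 / v_movreld_b32 relative to the start of
// the source/destination register tuple.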

} // End Defs = [M0, EXEC], UseNamedOperandTable = 1

multiclass SI_SPILL_SGPR <RegisterClass sgpr_class> {
  let UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC] in {
    def _SAVE : PseudoInstSI <
      (outs),
      (ins sgpr_class:$data, i32imm:$addr)> {
      let mayStore = 1;
      let mayLoad = 0;
    }

    def _RESTORE : PseudoInstSI <
      (outs sgpr_class:$data),
      (ins i32imm:$addr)> {
      let mayStore = 0;
      let mayLoad = 1;
    }
  } // End UseNamedOperandTable = 1, SGPRSpill = 1, Uses = [EXEC]
}

// You cannot use M0 as the output of v_readlane_b32 instructions or
// use it in the sdata operand of SMEM instructions. We still need to
// be able to spill the physical register m0, so allow it for
// SI_SPILL_32_* instructions.
defm SI_SPILL_S32  : SI_SPILL_SGPR <SReg_32>;
defm SI_SPILL_S64  : SI_SPILL_SGPR <SReg_64>;
defm SI_SPILL_S128 : SI_SPILL_SGPR <SReg_128>;
defm SI_SPILL_S256 : SI_SPILL_SGPR <SReg_256>;
defm SI_SPILL_S512 : SI_SPILL_SGPR <SReg_512>;

multiclass SI_SPILL_VGPR <RegisterClass vgpr_class> {
  let UseNamedOperandTable = 1, VGPRSpill = 1,
      SchedRW = [WriteVMEM] in {
    def _SAVE : VPseudoInstSI <
      (outs),
      (ins vgpr_class:$vdata, i32imm:$vaddr, SReg_128:$srsrc,
           SReg_32:$soffset, i32imm:$offset)> {
      let mayStore = 1;
      let mayLoad = 0;
      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }

    def _RESTORE : VPseudoInstSI <
      (outs vgpr_class:$vdata),
      (ins i32imm:$vaddr, SReg_128:$srsrc, SReg_32:$soffset,
           i32imm:$offset)> {
      let mayStore = 0;
      let mayLoad = 1;

      // (2 * 4) + (8 * num_subregs) bytes maximum
      let Size = !add(!shl(!srl(vgpr_class.Size, 5), 3), 8);
    }
  } // End UseNamedOperandTable = 1, VGPRSpill = 1, SchedRW = [WriteVMEM]
}
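
// A worked example of the Size formula above: vgpr_class.Size is in bits, so
// !srl(Size, 5) is the number of 32-bit sub-registers and !shl(..., 3) budgets
// 8 bytes of encoding per sub-register, plus the fixed "2 * 4" bytes of
// overhead. For VReg_128 (128 bits): (128 >> 5) = 4 subregs, (4 << 3) = 32,
// and 32 + 8 = 40 bytes maximum.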

defm SI_SPILL_V32  : SI_SPILL_VGPR <VGPR_32>;
defm SI_SPILL_V64  : SI_SPILL_VGPR <VReg_64>;
defm SI_SPILL_V96  : SI_SPILL_VGPR <VReg_96>;
defm SI_SPILL_V128 : SI_SPILL_VGPR <VReg_128>;
defm SI_SPILL_V256 : SI_SPILL_VGPR <VReg_256>;
defm SI_SPILL_V512 : SI_SPILL_VGPR <VReg_512>;

def SI_PC_ADD_REL_OFFSET : SPseudoInstSI <
  (outs SReg_64:$dst),
  (ins si_ga:$ptr_lo, si_ga:$ptr_hi),
  [(set SReg_64:$dst,
      (i64 (SIpc_add_rel_offset (tglobaladdr:$ptr_lo), (tglobaladdr:$ptr_hi))))]> {
  let Defs = [SCC];
}

} // End SubtargetPredicate = isGCN

let Predicates = [isGCN] in {

def : Pat<
  (int_amdgcn_else i64:$src, bb:$target),
  (SI_ELSE $src, $target, 0)
>;

def : Pat <
  (int_AMDGPU_kilp),
  (SI_KILL (i32 0xbf800000))
>;

//===----------------------------------------------------------------------===//
// VOP1 Patterns
//===----------------------------------------------------------------------===//

let Predicates = [UnsafeFPMath] in {

//def : RcpPat<V_RCP_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F64_e32, f64>;
//defm : RsqPat<V_RSQ_F32_e32, f32>;

def : RsqPat<V_RSQ_F32_e32, f32>;
def : RsqPat<V_RSQ_F64_e32, f64>;

// Convert (x - floor(x)) to fract(x)
def : Pat <
  (f32 (fsub (f32 (VOP3Mods f32:$x, i32:$mods)),
             (f32 (ffloor (f32 (VOP3Mods f32:$x, i32:$mods)))))),
  (V_FRACT_F32_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

// Convert (x + (-floor(x))) to fract(x)
def : Pat <
  (f64 (fadd (f64 (VOP3Mods f64:$x, i32:$mods)),
             (f64 (fneg (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))))))),
  (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE)
>;

} // End Predicates = [UnsafeFPMath]

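// f16 conversions. The patterns below that involve f64 or i32 bounce through
// f32 with a second convert, since the f16 converts used here only go to and
// from f32.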
def : Pat <
  (f32 (fpextend f16:$src)),
  (V_CVT_F32_F16_e32 $src)
>;

def : Pat <
  (f64 (fpextend f16:$src)),
  (V_CVT_F64_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (f16 (fpround f32:$src)),
  (V_CVT_F16_F32_e32 $src)
>;

def : Pat <
  (i32 (fp_to_sint f16:$src)),
  (V_CVT_I32_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (i32 (fp_to_uint f16:$src)),
  (V_CVT_U32_F32_e32 (V_CVT_F32_F16_e32 $src))
>;

def : Pat <
  (f16 (sint_to_fp i32:$src)),
  (V_CVT_F16_F32_e32 (V_CVT_F32_I32_e32 $src))
>;

def : Pat <
  (f16 (uint_to_fp i32:$src)),
  (V_CVT_F16_F32_e32 (V_CVT_F32_U32_e32 $src))
>;

//===----------------------------------------------------------------------===//
// VOP2 Patterns
//===----------------------------------------------------------------------===//

multiclass FMADPat <ValueType vt, Instruction inst> {
  def : Pat <
    (vt (fmad (VOP3NoMods0 vt:$src0, i32:$src0_modifiers, i1:$clamp, i32:$omod),
              (VOP3NoMods  vt:$src1, i32:$src1_modifiers),
              (VOP3NoMods  vt:$src2, i32:$src2_modifiers))),
    (inst $src0_modifiers, $src0, $src1_modifiers, $src1,
          $src2_modifiers, $src2, $clamp, $omod)
  >;
}

defm : FMADPat <f16, V_MAC_F16_e64>;
defm : FMADPat <f32, V_MAC_F32_e64>;

multiclass SelectPat <ValueType vt, Instruction inst> {
  def : Pat <
    (vt (select i1:$src0, vt:$src1, vt:$src2)),
    (inst $src2, $src1, $src0)
  >;
}

defm : SelectPat <i16, V_CNDMASK_B32_e64>;
defm : SelectPat <i32, V_CNDMASK_B32_e64>;
defm : SelectPat <f16, V_CNDMASK_B32_e64>;
defm : SelectPat <f32, V_CNDMASK_B32_e64>;

def : Pat <
  (i32 (add (i32 (ctpop i32:$popcnt)), i32:$val)),
  (V_BCNT_U32_B32_e64 $popcnt, $val)
>;

/********** ============================================ **********/
/********** Extraction, Insertion, Building and Casting  **********/
/********** ============================================ **********/

foreach Index = 0-2 in {
  def Extract_Element_v2i32_#Index : Extract_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2i32_#Index : Insert_Element <
    i32, v2i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v2f32_#Index : Extract_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v2f32_#Index : Insert_Element <
    f32, v2f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}
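
// For example, the Index = 0 iteration of the loop above produces a record
// named Extract_Element_v2i32_0 that extracts element 0 of a v2i32 through
// the sub0 sub-register index (sub#Index concatenates to "sub0", which
// !cast resolves to the SubRegIndex of that name).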

foreach Index = 0-3 in {
  def Extract_Element_v4i32_#Index : Extract_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4i32_#Index : Insert_Element <
    i32, v4i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v4f32_#Index : Extract_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v4f32_#Index : Insert_Element <
    f32, v4f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-7 in {
  def Extract_Element_v8i32_#Index : Extract_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8i32_#Index : Insert_Element <
    i32, v8i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v8f32_#Index : Extract_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v8f32_#Index : Insert_Element <
    f32, v8f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

foreach Index = 0-15 in {
  def Extract_Element_v16i32_#Index : Extract_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16i32_#Index : Insert_Element <
    i32, v16i32, Index, !cast<SubRegIndex>(sub#Index)
  >;

  def Extract_Element_v16f32_#Index : Extract_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
  def Insert_Element_v16f32_#Index : Insert_Element <
    f32, v16f32, Index, !cast<SubRegIndex>(sub#Index)
  >;
}

// FIXME: Why are only some of these type combinations defined for SReg and
// VReg?
// 16-bit bitcast
def : BitConvert <i16, f16, VGPR_32>;
def : BitConvert <f16, i16, VGPR_32>;
def : BitConvert <i16, f16, SReg_32>;
def : BitConvert <f16, i16, SReg_32>;

// 32-bit bitcast
def : BitConvert <i32, f32, VGPR_32>;
def : BitConvert <f32, i32, VGPR_32>;
def : BitConvert <i32, f32, SReg_32>;
def : BitConvert <f32, i32, SReg_32>;

// 64-bit bitcast
def : BitConvert <i64, f64, VReg_64>;
def : BitConvert <f64, i64, VReg_64>;
def : BitConvert <v2i32, v2f32, VReg_64>;
def : BitConvert <v2f32, v2i32, VReg_64>;
def : BitConvert <i64, v2i32, VReg_64>;
def : BitConvert <v2i32, i64, VReg_64>;
def : BitConvert <i64, v2f32, VReg_64>;
def : BitConvert <v2f32, i64, VReg_64>;
def : BitConvert <f64, v2f32, VReg_64>;
def : BitConvert <v2f32, f64, VReg_64>;
def : BitConvert <f64, v2i32, VReg_64>;
def : BitConvert <v2i32, f64, VReg_64>;
def : BitConvert <v4i32, v4f32, VReg_128>;
def : BitConvert <v4f32, v4i32, VReg_128>;

// 128-bit bitcast
def : BitConvert <v2i64, v4i32, SReg_128>;
def : BitConvert <v4i32, v2i64, SReg_128>;
def : BitConvert <v2f64, v4f32, VReg_128>;
def : BitConvert <v2f64, v4i32, VReg_128>;
def : BitConvert <v4f32, v2f64, VReg_128>;
def : BitConvert <v4i32, v2f64, VReg_128>;
def : BitConvert <v2i64, v2f64, VReg_128>;
def : BitConvert <v2f64, v2i64, VReg_128>;

// 256-bit bitcast
def : BitConvert <v8i32, v8f32, SReg_256>;
def : BitConvert <v8f32, v8i32, SReg_256>;
def : BitConvert <v8i32, v8f32, VReg_256>;
def : BitConvert <v8f32, v8i32, VReg_256>;

// 512-bit bitcast
def : BitConvert <v16i32, v16f32, VReg_512>;
def : BitConvert <v16f32, v16i32, VReg_512>;

/********** =================== **********/
/********** Src & Dst modifiers **********/
/********** =================== **********/

def : Pat <
  (AMDGPUclamp (VOP3Mods0Clamp f32:$src0, i32:$src0_modifiers, i32:$omod),
               (f32 FP_ZERO), (f32 FP_ONE)),
  (V_ADD_F32_e64 $src0_modifiers, $src0, 0, (i32 0), 1, $omod)
>;
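
// In other words, clamp-to-[0.0, 1.0] of $src0 is emitted as a v_add_f32 of
// $src0 with +0.0 whose VOP3 clamp operand (the literal 1 above) performs the
// actual clamping.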

/********** ================================ **********/
/********** Floating point absolute/negative **********/
/********** ================================ **********/

// Prevent expanding both fneg and fabs.
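// The patterns below implement fneg/fabs as plain integer bit operations on
// the IEEE sign bit (bit 31 for f32 and the high half of f64, bit 15 for f16):
//   fabs(x)       -> x AND ~sign_mask   (clear the sign bit)
//   fneg(x)       -> x XOR  sign_mask   (flip the sign bit)
//   fneg(fabs(x)) -> x OR   sign_mask   (force the sign bit on)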

def : Pat <
  (fneg (fabs f32:$src)),
  (S_OR_B32 $src, (S_MOV_B32 (i32 0x80000000))) // Set sign bit
>;

// FIXME: Should use S_OR_B32
def : Pat <
  (fneg (fabs f64:$src)),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_OR_B32_e32 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                  (V_MOV_B32_e32 (i32 0x80000000))), // Set sign bit.
    sub1)
>;

def : Pat <
  (fabs f32:$src),
  (V_AND_B32_e64 $src, (V_MOV_B32_e32 (i32 0x7fffffff)))
>;

def : Pat <
  (fneg f32:$src),
  (V_XOR_B32_e32 $src, (V_MOV_B32_e32 (i32 0x80000000)))
>;

def : Pat <
  (fabs f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_AND_B32_e64 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                   (V_MOV_B32_e32 (i32 0x7fffffff))), // Clear sign bit.
    sub1)
>;

def : Pat <
  (fneg f64:$src),
  (REG_SEQUENCE VReg_64,
    (i32 (EXTRACT_SUBREG f64:$src, sub0)),
    sub0,
    (V_XOR_B32_e32 (i32 (EXTRACT_SUBREG f64:$src, sub1)),
                   (i32 (V_MOV_B32_e32 (i32 0x80000000)))),
    sub1)
>;

def : Pat <
  (fcopysign f16:$src0, f16:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0, $src1)
>;

def : Pat <
  (fcopysign f32:$src0, f16:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x7fffffff)), $src0,
             (V_LSHLREV_B32_e64 (i32 16), $src1))
>;

def : Pat <
  (fcopysign f64:$src0, f16:$src1),
  (REG_SEQUENCE SReg_64,
    (i32 (EXTRACT_SUBREG $src0, sub0)), sub0,
    (V_BFI_B32 (S_MOV_B32 (i32 0x7fffffff)), (i32 (EXTRACT_SUBREG $src0, sub1)),
               (V_LSHLREV_B32_e64 (i32 16), $src1)), sub1)
>;

def : Pat <
  (fcopysign f16:$src0, f32:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0,
             (V_LSHRREV_B32_e64 (i32 16), $src1))
>;

def : Pat <
  (fcopysign f16:$src0, f64:$src1),
  (V_BFI_B32 (S_MOV_B32 (i32 0x00007fff)), $src0,
             (V_LSHRREV_B32_e64 (i32 16), (EXTRACT_SUBREG $src1, sub1)))
>;
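
// The fcopysign patterns above rely on v_bfi_b32 acting as a bitwise select,
// roughly dst = (src0 & src1) | (~src0 & src2): the magnitude mask keeps the
// exponent/mantissa bits of the magnitude input and takes the sign bit from
// the (possibly shifted) sign input. The shifts move the sign bit between
// bit 15 (f16) and bit 31 (f32, or the high dword of f64) as needed.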
|  | 712 |  | 
|  | 713 | def : Pat < | 
| Matt Arsenault | c79dc70 | 2016-11-15 02:25:28 +0000 | [diff] [blame] | 714 | (fneg f16:$src), | 
|  | 715 | (V_XOR_B32_e32 $src, (V_MOV_B32_e32 (i32 0x00008000))) | 
|  | 716 | >; | 
|  | 717 |  | 
|  | 718 | def : Pat < | 
|  | 719 | (fabs f16:$src), | 
|  | 720 | (V_AND_B32_e64 $src, (V_MOV_B32_e32 (i32 0x00007fff))) | 
|  | 721 | >; | 
|  | 722 |  | 
|  | 723 | def : Pat < | 
|  | 724 | (fneg (fabs f16:$src)), | 
|  | 725 | (S_OR_B32 $src, (S_MOV_B32 (i32 0x00008000))) // Set sign bit | 
|  | 726 | >; | 
|  | 727 |  | 
| Christian Konig | c756cb99 | 2013-02-16 11:28:22 +0000 | [diff] [blame] | 728 | /********** ================== **********/ | 
|  | 729 | /********** Immediate Patterns **********/ | 
|  | 730 | /********** ================== **********/ | 
|  | 731 |  | 
|  | 732 | def : Pat < | 
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 733 | (VGPRImm<(i32 imm)>:$imm), | 
| Christian Konig | c756cb99 | 2013-02-16 11:28:22 +0000 | [diff] [blame] | 734 | (V_MOV_B32_e32 imm:$imm) | 
|  | 735 | >; | 
|  | 736 |  | 
|  | 737 | def : Pat < | 
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 738 | (VGPRImm<(f32 fpimm)>:$imm), | 
| Tom Stellard | fb77f00 | 2015-01-13 22:59:41 +0000 | [diff] [blame] | 739 | (V_MOV_B32_e32 (f32 (bitcast_fpimm_to_i32 $imm))) | 
| Christian Konig | c756cb99 | 2013-02-16 11:28:22 +0000 | [diff] [blame] | 740 | >; | 
|  | 741 |  | 
|  | 742 | def : Pat < | 
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 743 | (i32 imm:$imm), | 
|  | 744 | (S_MOV_B32 imm:$imm) | 
|  | 745 | >; | 
|  | 746 |  | 
| Matt Arsenault | e96d037 | 2016-12-08 20:14:46 +0000 | [diff] [blame] | 747 | // FIXME: Workaround for ordering issue with peephole optimizer where | 
|  | 748 | // a register class copy interferes with immediate folding.  Should | 
|  | 749 | // use s_mov_b32, which can be shrunk to s_movk_i32 | 
|  | 750 | def : Pat < | 
|  | 751 | (VGPRImm<(f16 fpimm)>:$imm), | 
|  | 752 | (V_MOV_B32_e32 (f16 (bitcast_fpimm_to_i32 $imm))) | 
|  | 753 | >; | 
|  | 754 |  | 
| Matt Arsenault | 3d46319 | 2016-11-01 22:55:07 +0000 | [diff] [blame] | 755 | def : Pat < | 
|  | 756 | (f32 fpimm:$imm), | 
|  | 757 | (S_MOV_B32 (f32 (bitcast_fpimm_to_i32 $imm))) | 
|  | 758 | >; | 
|  | 759 |  | 
|  | 760 | def : Pat < | 
| Matt Arsenault | e96d037 | 2016-12-08 20:14:46 +0000 | [diff] [blame] | 761 | (f16 fpimm:$imm), | 
|  | 762 | (S_MOV_B32 (i32 (bitcast_fpimm_to_i32 $imm))) | 
|  | 763 | >; | 
|  | 764 |  | 
|  | 765 | def : Pat < | 
| Matt Arsenault | ac0fc84 | 2016-09-17 16:09:55 +0000 | [diff] [blame] | 766 | (i32 frameindex:$fi), | 
|  | 767 | (V_MOV_B32_e32 (i32 (frameindex_to_targetframeindex $fi))) | 
|  | 768 | >; | 
|  | 769 |  | 
|  | 770 | def : Pat < | 
| Christian Konig | b559b07 | 2013-02-16 11:28:36 +0000 | [diff] [blame] | 771 | (i64 InlineImm<i64>:$imm), | 
|  | 772 | (S_MOV_B64 InlineImm<i64>:$imm) | 
|  | 773 | >; | 
|  | 774 |  | 
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 775 | // XXX - Should this use a s_cmp to set SCC? | 
|  | 776 |  | 
|  | 777 | // Set to sign-extended 64-bit value (true = -1, false = 0) | 
|  | 778 | def : Pat < | 
|  | 779 | (i1 imm:$imm), | 
|  | 780 | (S_MOV_B64 (i64 (as_i64imm $imm))) | 
|  | 781 | >; | 
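|  |  |  | 
|  |  | // The reason for -1 rather than 1: i1 values on this target are carried | 
|  |  | // as wave-wide lane masks in a 64-bit SGPR pair, so a constant "true" | 
|  |  | // is the all-ones mask. | 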
|  | 782 |  | 
| Matt Arsenault | 303011a | 2014-12-17 21:04:08 +0000 | [diff] [blame] | 783 | def : Pat < | 
|  | 784 | (f64 InlineFPImm<f64>:$imm), | 
| Tom Stellard | fb77f00 | 2015-01-13 22:59:41 +0000 | [diff] [blame] | 785 | (S_MOV_B64 (f64 (bitcast_fpimm_to_i64 InlineFPImm<f64>:$imm))) | 
| Matt Arsenault | 303011a | 2014-12-17 21:04:08 +0000 | [diff] [blame] | 786 | >; | 
|  | 787 |  | 
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 788 | /********** ================== **********/ | 
|  | 789 | /********** Intrinsic Patterns **********/ | 
|  | 790 | /********** ================== **********/ | 
|  | 791 |  | 
| Tom Stellard | 40b7f1f | 2013-05-02 15:30:12 +0000 | [diff] [blame] | 792 | def : POW_Common <V_LOG_F32_e32, V_EXP_F32_e32, V_MUL_LEGACY_F32_e32>; | 
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 793 |  | 
|  | 794 | def : Pat < | 
| Tom Stellard | 40b7f1f | 2013-05-02 15:30:12 +0000 | [diff] [blame] | 795 | (int_AMDGPU_cube v4f32:$src), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 796 | (REG_SEQUENCE VReg_128, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 797 | (V_CUBETC_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)), | 
|  | 798 | 0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)), | 
|  | 799 | 0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 800 | 0 /* clamp */, 0 /* omod */), sub0, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 801 | (V_CUBESC_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)), | 
|  | 802 | 0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)), | 
|  | 803 | 0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 804 | 0 /* clamp */, 0 /* omod */), sub1, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 805 | (V_CUBEMA_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)), | 
|  | 806 | 0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)), | 
|  | 807 | 0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 808 | 0 /* clamp */, 0 /* omod */), sub2, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 809 | (V_CUBEID_F32 0 /* src0_modifiers */, (f32 (EXTRACT_SUBREG $src, sub0)), | 
|  | 810 | 0 /* src1_modifiers */, (f32 (EXTRACT_SUBREG $src, sub1)), | 
|  | 811 | 0 /* src2_modifiers */, (f32 (EXTRACT_SUBREG $src, sub2)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 812 | 0 /* clamp */, 0 /* omod */), sub3) | 
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 813 | >; | 
|  | 814 |  | 
| Michel Danzer | 0cc991e | 2013-02-22 11:22:58 +0000 | [diff] [blame] | 815 | def : Pat < | 
| Tom Stellard | 40b7f1f | 2013-05-02 15:30:12 +0000 | [diff] [blame] | 816 | (i32 (sext i1:$src0)), | 
|  | 817 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src0) | 
| Michel Danzer | 0cc991e | 2013-02-22 11:22:58 +0000 | [diff] [blame] | 818 | >; | 
|  | 819 |  | 
| Tom Stellard | f16d38c | 2014-02-13 23:34:13 +0000 | [diff] [blame] | 820 | class Ext32Pat <SDNode ext> : Pat < | 
|  | 821 | (i32 (ext i1:$src0)), | 
| Michel Danzer | 5d26fdf | 2014-02-05 09:48:05 +0000 | [diff] [blame] | 822 | (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src0) | 
|  | 823 | >; | 
|  | 824 |  | 
| Tom Stellard | f16d38c | 2014-02-13 23:34:13 +0000 | [diff] [blame] | 825 | def : Ext32Pat <zext>; | 
|  | 826 | def : Ext32Pat <anyext>; | 
|  | 827 |  | 
| Michel Danzer | 8caa904 | 2013-04-10 17:17:56 +0000 | [diff] [blame] | 828 | // The multiplication scales from [0,1] to the unsigned integer range | 
|  | 829 | def : Pat < | 
|  | 830 | (AMDGPUurecip i32:$src0), | 
|  | 831 | (V_CVT_U32_F32_e32 | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 832 | (V_MUL_F32_e32 (i32 CONST.FP_UINT_MAX_PLUS_1), | 
| Michel Danzer | 8caa904 | 2013-04-10 17:17:56 +0000 | [diff] [blame] | 833 | (V_RCP_IFLAG_F32_e32 (V_CVT_F32_U32_e32 $src0)))) | 
|  | 834 | >; | 
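|  |  |  | 
|  |  | // In other words, roughly: urecip(x) = (u32)(2^32 * (1.0f / (float)x)). | 
|  |  | // CONST.FP_UINT_MAX_PLUS_1 is 2^32 encoded as an f32, and V_RCP_IFLAG | 
|  |  | // provides the [0,1] reciprocal that the multiply rescales. | 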
|  | 835 |  | 
| Tom Stellard | 0289ff4 | 2014-05-16 20:56:44 +0000 | [diff] [blame] | 836 | //===----------------------------------------------------------------------===// | 
|  | 837 | // VOP3 Patterns | 
|  | 838 | //===----------------------------------------------------------------------===// | 
| Tom Stellard | 75aadc2 | 2012-12-11 21:25:42 +0000 | [diff] [blame] | 839 |  | 
| Matt Arsenault | eb26020 | 2014-05-22 18:00:15 +0000 | [diff] [blame] | 840 | def : IMad24Pat<V_MAD_I32_I24>; | 
|  | 841 | def : UMad24Pat<V_MAD_U32_U24>; | 
|  | 842 |  | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 843 | defm : BFIPatterns <V_BFI_B32, S_MOV_B32, SReg_64>; | 
| Tom Stellard | 0289ff4 | 2014-05-16 20:56:44 +0000 | [diff] [blame] | 844 | def : ROTRPattern <V_ALIGNBIT_B32>; | 
|  | 845 |  | 
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 846 | /********** ====================== **********/ | 
| Simon Pilgrim | e995a808 | 2016-11-18 11:04:02 +0000 | [diff] [blame] | 847 | /**********   Indirect addressing  **********/ | 
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 848 | /********** ====================== **********/ | 
|  | 849 |  | 
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 850 | multiclass SI_INDIRECT_Pattern <ValueType vt, ValueType eltvt, string VecSize> { | 
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 851 | // Extract with offset | 
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 852 | def : Pat< | 
| Nicolai Haehnle | 7968c34 | 2016-07-12 08:12:16 +0000 | [diff] [blame] | 853 | (eltvt (extractelt vt:$src, (MOVRELOffset i32:$idx, (i32 imm:$offset)))), | 
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 854 | (!cast<Instruction>("SI_INDIRECT_SRC_"#VecSize) $src, $idx, imm:$offset) | 
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 855 | >; | 
|  | 856 |  | 
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 857 | // Insert with offset | 
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 858 | def : Pat< | 
| Nicolai Haehnle | 7968c34 | 2016-07-12 08:12:16 +0000 | [diff] [blame] | 859 | (insertelt vt:$src, eltvt:$val, (MOVRELOffset i32:$idx, (i32 imm:$offset))), | 
| Matt Arsenault | 1322b6f | 2016-07-09 01:13:56 +0000 | [diff] [blame] | 860 | (!cast<Instruction>("SI_INDIRECT_DST_"#VecSize) $src, $idx, imm:$offset, $val) | 
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 861 | >; | 
|  | 862 | } | 
|  | 863 |  | 
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 864 | defm : SI_INDIRECT_Pattern <v2f32, f32, "V2">; | 
|  | 865 | defm : SI_INDIRECT_Pattern <v4f32, f32, "V4">; | 
|  | 866 | defm : SI_INDIRECT_Pattern <v8f32, f32, "V8">; | 
|  | 867 | defm : SI_INDIRECT_Pattern <v16f32, f32, "V16">; | 
| Matt Arsenault | f5958dd | 2014-02-02 00:05:35 +0000 | [diff] [blame] | 868 |  | 
| Matt Arsenault | 2841927 | 2015-10-07 00:42:51 +0000 | [diff] [blame] | 869 | defm : SI_INDIRECT_Pattern <v2i32, i32, "V2">; | 
|  | 870 | defm : SI_INDIRECT_Pattern <v4i32, i32, "V4">; | 
|  | 871 | defm : SI_INDIRECT_Pattern <v8i32, i32, "V8">; | 
|  | 872 | defm : SI_INDIRECT_Pattern <v16i32, i32, "V16">; | 
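|  |  |  | 
|  |  | // Each SI_INDIRECT_SRC_*/SI_INDIRECT_DST_* pseudo takes a dynamic vector | 
|  |  | // index plus a constant offset; the defm instantiations here only route | 
|  |  | // each vector width to the matching pseudo, which is expanded later | 
|  |  | // (e.g. via movrel or VGPR index mode, depending on the subtarget). | 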
| Christian Konig | 2989ffc | 2013-03-18 11:34:16 +0000 | [diff] [blame] | 873 |  | 
| Tom Stellard | 81d871d | 2013-11-13 23:36:50 +0000 | [diff] [blame] | 874 | //===----------------------------------------------------------------------===// | 
| Wei Ding | 1041a64 | 2016-08-24 14:59:47 +0000 | [diff] [blame] | 875 | // SAD Patterns | 
|  | 876 | //===----------------------------------------------------------------------===// | 
|  | 877 |  | 
|  | 878 | def : Pat < | 
|  | 879 | (add (sub_oneuse (umax i32:$src0, i32:$src1), | 
|  | 880 | (umin i32:$src0, i32:$src1)), | 
|  | 881 | i32:$src2), | 
|  | 882 | (V_SAD_U32 $src0, $src1, $src2) | 
|  | 883 | >; | 
|  | 884 |  | 
|  | 885 | def : Pat < | 
|  | 886 | (add (select_oneuse (i1 (setugt i32:$src0, i32:$src1)), | 
|  | 887 | (sub i32:$src0, i32:$src1), | 
|  | 888 | (sub i32:$src1, i32:$src0)), | 
|  | 889 | i32:$src2), | 
|  | 890 | (V_SAD_U32 $src0, $src1, $src2) | 
|  | 891 | >; | 
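|  |  |  | 
|  |  | // Both forms above are ways of writing |src0 - src1| + src2 for unsigned | 
|  |  | // inputs (a max-min difference, or a compare-and-select of the two | 
|  |  | // subtraction orders), which is exactly what V_SAD_U32 computes. | 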
|  | 892 |  | 
|  | 893 | //===----------------------------------------------------------------------===// | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 894 | // Conversion Patterns | 
|  | 895 | //===----------------------------------------------------------------------===// | 
|  | 896 |  | 
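|  |  | // The S_BFE_I32/S_BFE_I64 immediates below pack the field description as | 
|  |  | // offset | (width << 16), which is what the "0 | N << 16" comments spell | 
|  |  | // out: sign-extend from bit 0 with a field width of 1, 8, 16 or 32. | 
|  |  |  | 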
|  | 897 | def : Pat<(i32 (sext_inreg i32:$src, i1)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 898 | (S_BFE_I32 i32:$src, (i32 0x10000))>; // 0 | 1 << 16 | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 899 |  | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 900 | // Handle sext_inreg in i64 | 
|  | 901 | def : Pat < | 
|  | 902 | (i64 (sext_inreg i64:$src, i1)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 903 | (S_BFE_I64 i64:$src, (i32 0x10000)) // 0 | 1 << 16 | 
|  | 904 | >; | 
|  | 905 |  | 
|  | 906 | def : Pat < | 
| Matt Arsenault | 682eb43 | 2017-01-11 22:35:22 +0000 | [diff] [blame] | 907 | (i16 (sext_inreg i16:$src, i1)), | 
|  | 908 | (S_BFE_I32 $src, (i32 0x00010000)) // 0 | 1 << 16 | 
|  | 909 | >; | 
|  | 910 |  | 
|  | 911 | def : Pat < | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 912 | (i16 (sext_inreg i16:$src, i8)), | 
|  | 913 | (S_BFE_I32 $src, (i32 0x80000)) // 0 | 8 << 16 | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 914 | >; | 
|  | 915 |  | 
|  | 916 | def : Pat < | 
|  | 917 | (i64 (sext_inreg i64:$src, i8)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 918 | (S_BFE_I64 i64:$src, (i32 0x80000)) // 0 | 8 << 16 | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 919 | >; | 
|  | 920 |  | 
|  | 921 | def : Pat < | 
|  | 922 | (i64 (sext_inreg i64:$src, i16)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 923 | (S_BFE_I64 i64:$src, (i32 0x100000)) // 0 | 16 << 16 | 
| Matt Arsenault | 9481221 | 2014-11-14 18:18:16 +0000 | [diff] [blame] | 924 | >; | 
|  | 925 |  | 
|  | 926 | def : Pat < | 
|  | 927 | (i64 (sext_inreg i64:$src, i32)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 928 | (S_BFE_I64 i64:$src, (i32 0x200000)) // 0 | 32 << 16 | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 929 | >; | 
|  | 930 |  | 
| Matt Arsenault | c6b69a9 | 2016-07-26 23:06:33 +0000 | [diff] [blame] | 931 | def : Pat < | 
|  | 932 | (i64 (zext i32:$src)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 933 | (REG_SEQUENCE SReg_64, $src, sub0, (S_MOV_B32 (i32 0)), sub1) | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 934 | >; | 
|  | 935 |  | 
| Matt Arsenault | c6b69a9 | 2016-07-26 23:06:33 +0000 | [diff] [blame] | 936 | def : Pat < | 
|  | 937 | (i64 (anyext i32:$src)), | 
|  | 938 | (REG_SEQUENCE SReg_64, $src, sub0, (i32 (IMPLICIT_DEF)), sub1) | 
|  | 939 | >; | 
|  | 940 |  | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 941 | class ZExt_i64_i1_Pat <SDNode ext> : Pat < | 
|  | 942 | (i64 (ext i1:$src)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 943 | (REG_SEQUENCE VReg_64, | 
|  | 944 | (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src), sub0, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 945 | (S_MOV_B32 (i32 0)), sub1) | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 946 | >; | 
|  | 947 |  | 
|  | 948 |  | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 949 | def : ZExt_i64_i1_Pat<zext>; | 
|  | 950 | def : ZExt_i64_i1_Pat<anyext>; | 
|  | 951 |  | 
| Tom Stellard | bc4497b | 2016-02-12 23:45:29 +0000 | [diff] [blame] | 952 | // FIXME: We need to use COPY_TO_REGCLASS to work-around the fact that | 
|  | 953 | // REG_SEQUENCE patterns don't support instructions with multiple outputs. | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 954 | def : Pat < | 
|  | 955 | (i64 (sext i32:$src)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 956 | (REG_SEQUENCE SReg_64, $src, sub0, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 957 | (i32 (COPY_TO_REGCLASS (S_ASHR_I32 $src, (i32 31)), SReg_32_XM0)), sub1) | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 958 | >; | 
|  | 959 |  | 
|  | 960 | def : Pat < | 
|  | 961 | (i64 (sext i1:$src)), | 
| Matt Arsenault | 7d858d8 | 2014-11-02 23:46:54 +0000 | [diff] [blame] | 962 | (REG_SEQUENCE VReg_64, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 963 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src), sub0, | 
|  | 964 | (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src), sub1) | 
| Matt Arsenault | b2cbf79 | 2014-06-10 18:54:59 +0000 | [diff] [blame] | 965 | >; | 
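|  |  |  | 
|  |  | // Summary of the 64-bit extension patterns above: zext fills the high | 
|  |  | // half with 0, anyext leaves it undefined (IMPLICIT_DEF), and sext of an | 
|  |  | // i32 copies the sign in with S_ASHR_I32 $src, 31; i1 sources first go | 
|  |  | // through a V_CNDMASK_B32 select of 0/1 (zext, anyext) or 0/-1 (sext). | 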
|  | 966 |  | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 967 | class FPToI1Pat<Instruction Inst, int KOne, ValueType kone_type, ValueType vt, SDPatternOperator fp_to_int> : Pat < | 
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 968 | (i1 (fp_to_int (vt (VOP3Mods vt:$src0, i32:$src0_modifiers)))), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 969 | (i1 (Inst 0, (kone_type KOne), $src0_modifiers, $src0, DSTCLAMP.NONE, DSTOMOD.NONE)) | 
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 970 | >; | 
|  | 971 |  | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 972 | def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_ONE, i32, f32, fp_to_uint>; | 
|  | 973 | def : FPToI1Pat<V_CMP_EQ_F32_e64, CONST.FP32_NEG_ONE, i32, f32, fp_to_sint>; | 
|  | 974 | def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_ONE, i64, f64, fp_to_uint>; | 
|  | 975 | def : FPToI1Pat<V_CMP_EQ_F64_e64, CONST.FP64_NEG_ONE, i64, f64, fp_to_sint>; | 
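|  |  |  | 
|  |  | // Only 0.0 and +/-1.0 convert to in-range i1 values, so FPToI1Pat lowers | 
|  |  | // the conversion to an equality compare against 1.0 (fp_to_uint) or | 
|  |  | // -1.0 (fp_to_sint). | 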
| Matt Arsenault | 7fb961f | 2016-07-22 17:01:21 +0000 | [diff] [blame] | 976 |  | 
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 977 | // If we need to perform a logical operation on i1 values, we need to | 
|  | 978 | // use vector comparisons since there is only one SCC register. Vector | 
| Simon Pilgrim | e995a808 | 2016-11-18 11:04:02 +0000 | [diff] [blame] | 979 | // comparisons still write to a pair of SGPRs, so treat these as | 
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 980 | // 64-bit comparisons. When legalizing SGPR copies, instructions | 
|  | 981 | // resulting in the copies from SCC to these instructions will be | 
|  | 982 | // moved to the VALU. | 
|  | 983 | def : Pat < | 
|  | 984 | (i1 (and i1:$src0, i1:$src1)), | 
|  | 985 | (S_AND_B64 $src0, $src1) | 
|  | 986 | >; | 
|  | 987 |  | 
|  | 988 | def : Pat < | 
|  | 989 | (i1 (or i1:$src0, i1:$src1)), | 
|  | 990 | (S_OR_B64 $src0, $src1) | 
|  | 991 | >; | 
|  | 992 |  | 
|  | 993 | def : Pat < | 
|  | 994 | (i1 (xor i1:$src0, i1:$src1)), | 
|  | 995 | (S_XOR_B64 $src0, $src1) | 
|  | 996 | >; | 
|  | 997 |  | 
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 998 | def : Pat < | 
|  | 999 | (f32 (sint_to_fp i1:$src)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1000 | (V_CNDMASK_B32_e64 (i32 0), (i32 CONST.FP32_NEG_ONE), $src) | 
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1001 | >; | 
|  | 1002 |  | 
|  | 1003 | def : Pat < | 
|  | 1004 | (f32 (uint_to_fp i1:$src)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1005 | (V_CNDMASK_B32_e64 (i32 0), (i32 CONST.FP32_ONE), $src) | 
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1006 | >; | 
|  | 1007 |  | 
|  | 1008 | def : Pat < | 
|  | 1009 | (f64 (sint_to_fp i1:$src)), | 
| Matt Arsenault | becd656 | 2014-12-03 05:22:35 +0000 | [diff] [blame] | 1010 | (V_CVT_F64_I32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 -1), $src)) | 
| Matt Arsenault | aeca2fa | 2014-05-31 06:47:42 +0000 | [diff] [blame] | 1011 | >; | 
|  | 1012 |  | 
|  | 1013 | def : Pat < | 
|  | 1014 | (f64 (uint_to_fp i1:$src)), | 
|  | 1015 | (V_CVT_F64_U32_e32 (V_CNDMASK_B32_e64 (i32 0), (i32 1), $src)) | 
|  | 1016 | >; | 
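|  |  |  | 
|  |  | // i1 -> fp conversions select directly between 0.0 and +/-1.0: the f32 | 
|  |  | // cases use the inline FP constants via V_CNDMASK_B32, and the f64 cases | 
|  |  | // select an integer 0/1 or 0/-1 first and run it through V_CVT_F64_*. | 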
|  | 1017 |  | 
| Matt Arsenault | 5dbd5db | 2014-04-22 03:49:30 +0000 | [diff] [blame] | 1018 | //===----------------------------------------------------------------------===// | 
| Tom Stellard | fb96169 | 2013-10-23 00:44:19 +0000 | [diff] [blame] | 1019 | // Miscellaneous Patterns | 
|  | 1020 | //===----------------------------------------------------------------------===// | 
|  | 1021 |  | 
|  | 1022 | def : Pat < | 
| Tom Stellard | 81d871d | 2013-11-13 23:36:50 +0000 | [diff] [blame] | 1023 | (i32 (trunc i64:$a)), | 
|  | 1024 | (EXTRACT_SUBREG $a, sub0) | 
|  | 1025 | >; | 
|  | 1026 |  | 
| Michel Danzer | bf1a641 | 2014-01-28 03:01:16 +0000 | [diff] [blame] | 1027 | def : Pat < | 
|  | 1028 | (i1 (trunc i32:$a)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1029 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), $a), (i32 1)) | 
| Michel Danzer | bf1a641 | 2014-01-28 03:01:16 +0000 | [diff] [blame] | 1030 | >; | 
|  | 1031 |  | 
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1032 | def : Pat < | 
| Matt Arsenault | abd271b | 2015-02-05 06:05:13 +0000 | [diff] [blame] | 1033 | (i1 (trunc i64:$a)), | 
| Matt Arsenault | 5d8eb25 | 2016-09-30 01:50:20 +0000 | [diff] [blame] | 1034 | (V_CMP_EQ_U32_e64 (S_AND_B32 (i32 1), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1035 | (i32 (EXTRACT_SUBREG $a, sub0))), (i32 1)) | 
| Matt Arsenault | abd271b | 2015-02-05 06:05:13 +0000 | [diff] [blame] | 1036 | >; | 
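|  |  |  | 
|  |  | // trunc to i1 keeps only bit 0: mask with 1 and compare the result | 
|  |  | // against 1 to produce the lane-mask form of the boolean. | 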
|  | 1037 |  | 
|  | 1038 | def : Pat < | 
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1039 | (i32 (bswap i32:$a)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1040 | (V_BFI_B32 (S_MOV_B32 (i32 0x00ff00ff)), | 
|  | 1041 | (V_ALIGNBIT_B32 $a, $a, (i32 24)), | 
|  | 1042 | (V_ALIGNBIT_B32 $a, $a, (i32 8))) | 
| Matt Arsenault | e306a32 | 2014-10-21 16:25:08 +0000 | [diff] [blame] | 1043 | >; | 
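|  |  |  | 
|  |  | // The bswap selection works because V_ALIGNBIT_B32 $a, $a, N is a rotate | 
|  |  | // right by N: rotr(a, 24) already has bytes 0 and 2 in their swapped | 
|  |  | // positions, rotr(a, 8) has bytes 1 and 3, and V_BFI_B32 with the mask | 
|  |  | // 0x00ff00ff merges the two, i.e. in C terms | 
|  |  | // bswap(a) = (rotr(a, 24) & 0x00ff00ff) | (rotr(a, 8) & 0xff00ff00). | 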
|  | 1044 |  | 
| Marek Olsak | 63a7b08 | 2015-03-24 13:40:21 +0000 | [diff] [blame] | 1045 | multiclass BFMPatterns <ValueType vt, InstSI BFM, InstSI MOV> { | 
|  | 1046 | def : Pat < | 
|  | 1047 | (vt (shl (vt (add (vt (shl 1, vt:$a)), -1)), vt:$b)), | 
|  | 1048 | (BFM $a, $b) | 
|  | 1049 | >; | 
|  | 1050 |  | 
|  | 1051 | def : Pat < | 
|  | 1052 | (vt (add (vt (shl 1, vt:$a)), -1)), | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1053 | (BFM $a, (MOV (i32 0))) | 
| Marek Olsak | 63a7b08 | 2015-03-24 13:40:21 +0000 | [diff] [blame] | 1054 | >; | 
|  | 1055 | } | 
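|  |  |  | 
|  |  | // S_BFM_B32 builds the mask ((1 << $a) - 1) << $b, so the first pattern | 
|  |  | // folds the explicit shift/add form and the second covers the offset-0 | 
|  |  | // case by supplying a 0 materialized with the MOV instruction. | 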
|  | 1056 |  | 
|  | 1057 | defm : BFMPatterns <i32, S_BFM_B32, S_MOV_B32>; | 
|  | 1058 | // FIXME: defm : BFMPatterns <i64, S_BFM_B64, S_MOV_B64>; | 
|  | 1059 |  | 
| Marek Olsak | 949f5da | 2015-03-24 13:40:34 +0000 | [diff] [blame] | 1060 | def : BFEPattern <V_BFE_U32, S_MOV_B32>; | 
|  | 1061 |  | 
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1062 | def : Pat< | 
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame^] | 1063 | (fcanonicalize (f16 (VOP3Mods f16:$src, i32:$src_mods))), | 
|  | 1064 | (V_MUL_F16_e64 0, (i32 CONST.FP16_ONE), $src_mods, $src, 0, 0) | 
| Matt Arsenault | ce84130 | 2016-12-22 03:05:37 +0000 | [diff] [blame] | 1065 | >; | 
|  | 1066 |  | 
|  | 1067 | def : Pat< | 
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame^] | 1068 | (fcanonicalize (f32 (VOP3Mods f32:$src, i32:$src_mods))), | 
|  | 1069 | (V_MUL_F32_e64 0, (i32 CONST.FP32_ONE), $src_mods, $src, 0, 0) | 
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1070 | >; | 
|  | 1071 |  | 
|  | 1072 | def : Pat< | 
| Matt Arsenault | d5d7851 | 2017-01-31 17:28:40 +0000 | [diff] [blame^] | 1073 | (fcanonicalize (f64 (VOP3Mods f64:$src, i32:$src_mods))), | 
|  | 1074 | (V_MUL_F64 0, CONST.FP64_ONE, $src_mods, $src, 0, 0) | 
| Matt Arsenault | 9cd9071 | 2016-04-14 01:42:16 +0000 | [diff] [blame] | 1075 | >; | 
|  | 1076 |  | 
| Matt Arsenault | 4165efd | 2017-01-17 07:26:53 +0000 | [diff] [blame] | 1077 | // Allow integer inputs | 
|  | 1078 | class ExpPattern<SDPatternOperator node, ValueType vt, Instruction Inst> : Pat< | 
|  | 1079 | (node (i8 timm:$tgt), (i8 timm:$en), vt:$src0, vt:$src1, vt:$src2, vt:$src3, (i1 timm:$compr), (i1 timm:$vm)), | 
|  | 1080 | (Inst i8:$tgt, vt:$src0, vt:$src1, vt:$src2, vt:$src3, i1:$vm, i1:$compr, i8:$en) | 
|  | 1081 | >; | 
|  | 1082 |  | 
|  | 1083 | def : ExpPattern<AMDGPUexport, i32, EXP>; | 
|  | 1084 | def : ExpPattern<AMDGPUexport_done, i32, EXP_DONE>; | 
|  | 1085 |  | 
| Marek Olsak | 43650e4 | 2015-03-24 13:40:08 +0000 | [diff] [blame] | 1086 | //===----------------------------------------------------------------------===// | 
|  | 1087 | // Fract Patterns | 
|  | 1088 | //===----------------------------------------------------------------------===// | 
|  | 1089 |  | 
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1090 | let Predicates = [isSI] in { | 
|  | 1091 |  | 
|  | 1092 | // V_FRACT is buggy on SI, so the F32 version is never used and (x-floor(x)) is | 
|  | 1093 | // used instead. However, SI doesn't have V_FLOOR_F64, so the most efficient | 
|  | 1094 | // way to implement it is using V_FRACT_F64. | 
|  | 1095 | // The workaround for the V_FRACT bug is: | 
|  | 1096 | //    fract(x) = isnan(x) ? x : min(V_FRACT(x), 0.99999999999999999) | 
|  | 1097 |  | 
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1098 | // Convert floor(x) to (x - fract(x)) | 
|  | 1099 | def : Pat < | 
|  | 1100 | (f64 (ffloor (f64 (VOP3Mods f64:$x, i32:$mods)))), | 
|  | 1101 | (V_ADD_F64 | 
|  | 1102 | $mods, | 
|  | 1103 | $x, | 
|  | 1104 | SRCMODS.NEG, | 
|  | 1105 | (V_CNDMASK_B64_PSEUDO | 
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1106 | (V_MIN_F64 | 
|  | 1107 | SRCMODS.NONE, | 
|  | 1108 | (V_FRACT_F64_e64 $mods, $x, DSTCLAMP.NONE, DSTOMOD.NONE), | 
|  | 1109 | SRCMODS.NONE, | 
|  | 1110 | (V_MOV_B64_PSEUDO 0x3fefffffffffffff), | 
|  | 1111 | DSTCLAMP.NONE, DSTOMOD.NONE), | 
| Marek Olsak | 1354b87 | 2015-07-27 11:37:42 +0000 | [diff] [blame] | 1112 | $x, | 
| Tom Stellard | 115a615 | 2016-11-10 16:02:37 +0000 | [diff] [blame] | 1113 | (V_CMP_CLASS_F64_e64 SRCMODS.NONE, $x, (i32 3 /*NaN*/))), | 
| Marek Olsak | 7d77728 | 2015-03-24 13:40:15 +0000 | [diff] [blame] | 1114 | DSTCLAMP.NONE, DSTOMOD.NONE) | 
|  | 1115 | >; | 
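|  |  |  | 
|  |  | // Reading of the selected sequence: V_FRACT_F64 is clamped via V_MIN_F64 | 
|  |  | // to the largest double below 1.0 (0x3fefffffffffffff), the V_CMP_CLASS | 
|  |  | // test with mask 3 (signaling | quiet NaN) routes NaN inputs through | 
|  |  | // unchanged, and the final V_ADD_F64 with SRCMODS.NEG forms x - fract(x). | 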
|  | 1116 |  | 
|  | 1117 | } // End Predicates = [isSI] | 
|  | 1118 |  | 
| Tom Stellard | fb96169 | 2013-10-23 00:44:19 +0000 | [diff] [blame] | 1119 | //============================================================================// | 
| Tom Stellard | eac65dd | 2013-05-03 17:21:20 +0000 | [diff] [blame] | 1120 | // Miscellaneous Optimization Patterns | 
|  | 1121 | //============================================================================// | 
|  | 1122 |  | 
| Matt Arsenault | 49dd428 | 2014-09-15 17:15:02 +0000 | [diff] [blame] | 1123 | def : SHA256MaPattern <V_BFI_B32, V_XOR_B32_e64>; | 
| Tom Stellard | eac65dd | 2013-05-03 17:21:20 +0000 | [diff] [blame] | 1124 |  | 
| Matt Arsenault | c89f291 | 2016-03-07 21:54:48 +0000 | [diff] [blame] | 1125 | def : IntMed3Pat<V_MED3_I32, smax, smax_oneuse, smin_oneuse>; | 
|  | 1126 | def : IntMed3Pat<V_MED3_U32, umax, umax_oneuse, umin_oneuse>; | 
|  | 1127 |  | 
| Matt Arsenault | f84e5d9 | 2017-01-31 03:07:46 +0000 | [diff] [blame] | 1128 | // This matches 16 permutations of | 
|  | 1129 | // max(min(x, y), min(max(x, y), z)) | 
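|  |  | // (16 because each of the four commutative fminnum/fmaxnum nodes in the | 
|  |  | // pattern can have its operands in either order: 2^4 orderings.) | 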
|  | 1130 | class FPMed3Pat<ValueType vt, | 
|  | 1131 | Instruction med3Inst> : Pat< | 
|  | 1132 | (fmaxnum (fminnum_oneuse (VOP3Mods_nnan vt:$src0, i32:$src0_mods), | 
|  | 1133 | (VOP3Mods_nnan vt:$src1, i32:$src1_mods)), | 
|  | 1134 | (fminnum_oneuse (fmaxnum_oneuse (VOP3Mods_nnan vt:$src0, i32:$src0_mods), | 
|  | 1135 | (VOP3Mods_nnan vt:$src1, i32:$src1_mods)), | 
|  | 1136 | (vt (VOP3Mods_nnan vt:$src2, i32:$src2_mods)))), | 
|  | 1137 | (med3Inst $src0_mods, $src0, $src1_mods, $src1, $src2_mods, $src2, DSTCLAMP.NONE, DSTOMOD.NONE) | 
|  | 1138 | >; | 
|  | 1139 |  | 
|  | 1140 | def : FPMed3Pat<f32, V_MED3_F32>; | 
|  | 1141 |  | 
| Matt Arsenault | af63524 | 2017-01-30 19:30:24 +0000 | [diff] [blame] | 1142 |  | 
|  | 1143 | // Undo sub x, c -> add x, -c canonicalization since c is more likely | 
|  | 1144 | // an inline immediate than -c. | 
|  | 1145 | // TODO: Also do for 64-bit. | 
|  | 1146 | def : Pat< | 
|  | 1147 | (add i32:$src0, (i32 NegSubInlineConst32:$src1)), | 
|  | 1148 | (S_SUB_I32 $src0, NegSubInlineConst32:$src1) | 
|  | 1149 | >; | 
|  | 1150 |  | 
| Tom Stellard | 245c15f | 2015-05-26 15:55:52 +0000 | [diff] [blame] | 1151 | //============================================================================// | 
|  | 1152 | // Assembler aliases | 
|  | 1153 | //============================================================================// | 
|  | 1154 |  | 
|  | 1155 | def : MnemonicAlias<"v_add_u32", "v_add_i32">; | 
|  | 1156 | def : MnemonicAlias<"v_sub_u32", "v_sub_i32">; | 
|  | 1157 | def : MnemonicAlias<"v_subrev_u32", "v_subrev_i32">; | 
|  | 1158 |  | 
| Marek Olsak | 5df00d6 | 2014-12-07 12:18:57 +0000 | [diff] [blame] | 1159 | } // End isGCN predicate |