| Tim Northover | 3b0846e | 2014-05-24 12:50:23 +0000 | [diff] [blame] | 1 | ; RUN: llc < %s -march=arm64 -verify-machineinstrs | FileCheck %s |
| 2 | ; RUN: llc < %s -march=arm64 -aarch64-unscaled-mem-op=true\ |
| 3 | ; RUN: -verify-machineinstrs | FileCheck -check-prefix=LDUR_CHK %s |
| 4 | |
; Two adjacent i32 loads at byte offsets 0 and +4 from %p should be merged
; by the AArch64 load/store optimizer into a single 32-bit LDP.
; CHECK: ldp_int
; CHECK: ldp
define i32 @ldp_int(i32* %p) nounwind {
  %tmp = load i32* %p, align 4
  %add.ptr = getelementptr inbounds i32* %p, i64 1
  %tmp1 = load i32* %add.ptr, align 4
  %add = add nsw i32 %tmp1, %tmp
  ret i32 %add
}
| 14 | |
; Two adjacent i32 loads whose results are both sign-extended to i64 should
; fold into a single sign-extending load pair (LDPSW) rather than LDP + SXTW.
; CHECK: ldp_sext_int
; CHECK: ldpsw
define i64 @ldp_sext_int(i32* %p) nounwind {
  %tmp = load i32* %p, align 4
  %add.ptr = getelementptr inbounds i32* %p, i64 1
  %tmp1 = load i32* %add.ptr, align 4
  %sexttmp = sext i32 %tmp to i64
  %sexttmp1 = sext i32 %tmp1 to i64
  %add = add nsw i64 %sexttmp1, %sexttmp
  ret i64 %add
}
| 26 | |
; Two adjacent i64 loads (offsets 0 and +8) should merge into a 64-bit LDP.
; CHECK: ldp_long
; CHECK: ldp
define i64 @ldp_long(i64* %p) nounwind {
  %tmp = load i64* %p, align 8
  %add.ptr = getelementptr inbounds i64* %p, i64 1
  %tmp1 = load i64* %add.ptr, align 8
  %add = add nsw i64 %tmp1, %tmp
  ret i64 %add
}
| 36 | |
; Pairing also applies to FP registers: two adjacent float loads become an
; S-register LDP.
; CHECK: ldp_float
; CHECK: ldp
define float @ldp_float(float* %p) nounwind {
  %tmp = load float* %p, align 4
  %add.ptr = getelementptr inbounds float* %p, i64 1
  %tmp1 = load float* %add.ptr, align 4
  %add = fadd float %tmp, %tmp1
  ret float %add
}
| 46 | |
; Same as ldp_float but for doubles: adjacent loads merge into a D-register LDP.
; CHECK: ldp_double
; CHECK: ldp
define double @ldp_double(double* %p) nounwind {
  %tmp = load double* %p, align 8
  %add.ptr = getelementptr inbounds double* %p, i64 1
  %tmp1 = load double* %add.ptr, align 8
  %add = fadd double %tmp, %tmp1
  ret double %add
}
| 56 | |
; Test the load/store optimizer---combine ldurs into a ldp, if appropriate
; The two i32 loads sit at byte offsets -4 and -8 from %a (negative offsets
; select the unscaled LDUR form); they should still pair into an LDP based at
; [x0, #-8]. The `align 2` underalignment is deliberate — presumably to keep
; the loads on the unscaled path; confirm against the pass's alignment checks.
define i32 @ldur_int(i32* %a) nounwind {
; LDUR_CHK: ldur_int
; LDUR_CHK: ldp [[DST1:w[0-9]+]], [[DST2:w[0-9]+]], [x0, #-8]
; LDUR_CHK-NEXT: add w{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i32* %a, i32 -1
  %tmp1 = load i32* %p1, align 2
  %p2 = getelementptr inbounds i32* %a, i32 -2
  %tmp2 = load i32* %p2, align 2
  %tmp3 = add i32 %tmp1, %tmp2
  ret i32 %tmp3
}
| 70 | |
; As ldur_int, but both results are sign-extended to i64, so the pair should
; form a sign-extending LDPSW at [x0, #-8] instead of a plain LDP.
define i64 @ldur_sext_int(i32* %a) nounwind {
; LDUR_CHK: ldur_sext_int
; LDUR_CHK: ldpsw [[DST1:x[0-9]+]], [[DST2:x[0-9]+]], [x0, #-8]
; LDUR_CHK-NEXT: add x{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i32* %a, i32 -1
  %tmp1 = load i32* %p1, align 2
  %p2 = getelementptr inbounds i32* %a, i32 -2
  %tmp2 = load i32* %p2, align 2
  %sexttmp1 = sext i32 %tmp1 to i64
  %sexttmp2 = sext i32 %tmp2 to i64
  %tmp3 = add i64 %sexttmp1, %sexttmp2
  ret i64 %tmp3
}
| 85 | |
; 64-bit variant: i64 loads at byte offsets -8 and -16 pair into an LDP
; based at [x0, #-16].
define i64 @ldur_long(i64* %a) nounwind ssp {
; LDUR_CHK: ldur_long
; LDUR_CHK: ldp [[DST1:x[0-9]+]], [[DST2:x[0-9]+]], [x0, #-16]
; LDUR_CHK-NEXT: add x{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i64* %a, i64 -1
  %tmp1 = load i64* %p1, align 2
  %p2 = getelementptr inbounds i64* %a, i64 -2
  %tmp2 = load i64* %p2, align 2
  %tmp3 = add i64 %tmp1, %tmp2
  ret i64 %tmp3
}
| 98 | |
; FP variant of ldur_int: float loads at offsets -4 and -8 pair into an
; S-register LDP at [x0, #-8].
define float @ldur_float(float* %a) {
; LDUR_CHK: ldur_float
; LDUR_CHK: ldp [[DST1:s[0-9]+]], [[DST2:s[0-9]+]], [x0, #-8]
; LDUR_CHK-NEXT: add s{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds float* %a, i64 -1
  %tmp1 = load float* %p1, align 2
  %p2 = getelementptr inbounds float* %a, i64 -2
  %tmp2 = load float* %p2, align 2
  %tmp3 = fadd float %tmp1, %tmp2
  ret float %tmp3
}
| 111 | |
; Double variant: loads at offsets -8 and -16 pair into a D-register LDP at
; [x0, #-16].
define double @ldur_double(double* %a) {
; LDUR_CHK: ldur_double
; LDUR_CHK: ldp [[DST1:d[0-9]+]], [[DST2:d[0-9]+]], [x0, #-16]
; LDUR_CHK-NEXT: add d{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds double* %a, i64 -1
  %tmp1 = load double* %p1, align 2
  %p2 = getelementptr inbounds double* %a, i64 -2
  %tmp2 = load double* %p2, align 2
  %tmp3 = fadd double %tmp1, %tmp2
  ret double %tmp3
}
| 124 | |
; Now check some boundary conditions
; Loads at byte offsets -248 and -256: -256 is the most negative offset an
; unscaled LDUR can encode (signed 9-bit immediate, -256..+255), so both
; loads are still reachable and must pair into an LDP at [x0, #-256].
define i64 @pairUpBarelyIn(i64* %a) nounwind ssp {
; LDUR_CHK: pairUpBarelyIn
; LDUR_CHK-NOT: ldur
; LDUR_CHK: ldp [[DST1:x[0-9]+]], [[DST2:x[0-9]+]], [x0, #-256]
; LDUR_CHK-NEXT: add x{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i64* %a, i64 -31
  %tmp1 = load i64* %p1, align 2
  %p2 = getelementptr inbounds i64* %a, i64 -32
  %tmp2 = load i64* %p2, align 2
  %tmp3 = add i64 %tmp1, %tmp2
  ret i64 %tmp3
}
| 139 | |
; Sign-extending counterpart of pairUpBarelyIn: i32 loads at byte offsets
; -252 and -256 (the -256 edge of the unscaled range) must pair into an
; LDPSW at [x0, #-256].
define i64 @pairUpBarelyInSext(i32* %a) nounwind ssp {
; LDUR_CHK: pairUpBarelyInSext
; LDUR_CHK-NOT: ldur
; LDUR_CHK: ldpsw [[DST1:x[0-9]+]], [[DST2:x[0-9]+]], [x0, #-256]
; LDUR_CHK-NEXT: add x{{[0-9]+}}, [[DST2]], [[DST1]]
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i32* %a, i64 -63
  %tmp1 = load i32* %p1, align 2
  %p2 = getelementptr inbounds i32* %a, i64 -64
  %tmp2 = load i32* %p2, align 2
  %sexttmp1 = sext i32 %tmp1 to i64
  %sexttmp2 = sext i32 %tmp2 to i64
  %tmp3 = add i64 %sexttmp1, %sexttmp2
  ret i64 %tmp3
}
| 155 | |
; One element past the boundary: loads at byte offsets -256 and -264, where
; -264 falls outside the unscaled -256..+255 range, so no load pair may form.
define i64 @pairUpBarelyOut(i64* %a) nounwind ssp {
; LDUR_CHK: pairUpBarelyOut
; LDUR_CHK-NOT: ldp
; Don't be fragile about which loads or manipulations of the base register
; are used---just check that there isn't an ldp before the add
; LDUR_CHK: add
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i64* %a, i64 -32
  %tmp1 = load i64* %p1, align 2
  %p2 = getelementptr inbounds i64* %a, i64 -33
  %tmp2 = load i64* %p2, align 2
  %tmp3 = add i64 %tmp1, %tmp2
  ret i64 %tmp3
}
| 170 | |
; Sign-extending counterpart of pairUpBarelyOut: i32 loads at byte offsets
; -256 and -260; -260 is outside the unscaled range, so no LDP/LDPSW may form.
define i64 @pairUpBarelyOutSext(i32* %a) nounwind ssp {
; LDUR_CHK: pairUpBarelyOutSext
; LDUR_CHK-NOT: ldp
; Don't be fragile about which loads or manipulations of the base register
; are used---just check that there isn't an ldp before the add
; LDUR_CHK: add
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i32* %a, i64 -64
  %tmp1 = load i32* %p1, align 2
  %p2 = getelementptr inbounds i32* %a, i64 -65
  %tmp2 = load i32* %p2, align 2
  %sexttmp1 = sext i32 %tmp1 to i64
  %sexttmp2 = sext i32 %tmp2 to i64
  %tmp3 = add i64 %sexttmp1, %sexttmp2
  ret i64 %tmp3
}
| 187 | |
; The i8 GEPs nudge each i64 address by +1 byte, making the offsets (-143 and
; -135 from %a) not multiples of 8. LDP requires the offset to be a multiple
; of the access size, so the loads must stay as two separate LDURs.
define i64 @pairUpNotAligned(i64* %a) nounwind ssp {
; LDUR_CHK: pairUpNotAligned
; LDUR_CHK-NOT: ldp
; LDUR_CHK: ldur
; LDUR_CHK-NEXT: ldur
; LDUR_CHK-NEXT: add
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i64* %a, i64 -18
  %bp1 = bitcast i64* %p1 to i8*
  %bp1p1 = getelementptr inbounds i8* %bp1, i64 1
  %dp1 = bitcast i8* %bp1p1 to i64*
  %tmp1 = load i64* %dp1, align 1

  %p2 = getelementptr inbounds i64* %a, i64 -17
  %bp2 = bitcast i64* %p2 to i8*
  %bp2p1 = getelementptr inbounds i8* %bp2, i64 1
  %dp2 = bitcast i8* %bp2p1 to i64*
  %tmp2 = load i64* %dp2, align 1

  %tmp3 = add i64 %tmp1, %tmp2
  ret i64 %tmp3
}
| Quentin Colombet | 29f5533 | 2015-01-24 01:25:54 +0000 | [diff] [blame] | 210 | |
; Sign-extending counterpart of pairUpNotAligned: the +1-byte nudge makes the
; i32 offsets (-71 and -67 from %a) not multiples of 4, so no LDPSW may form;
; the loads must stay as two separate sign-extending LDURSWs.
define i64 @pairUpNotAlignedSext(i32* %a) nounwind ssp {
; LDUR_CHK: pairUpNotAlignedSext
; LDUR_CHK-NOT: ldp
; LDUR_CHK: ldursw
; LDUR_CHK-NEXT: ldursw
; LDUR_CHK-NEXT: add
; LDUR_CHK-NEXT: ret
  %p1 = getelementptr inbounds i32* %a, i64 -18
  %bp1 = bitcast i32* %p1 to i8*
  %bp1p1 = getelementptr inbounds i8* %bp1, i64 1
  %dp1 = bitcast i8* %bp1p1 to i32*
  %tmp1 = load i32* %dp1, align 1

  %p2 = getelementptr inbounds i32* %a, i64 -17
  %bp2 = bitcast i32* %p2 to i8*
  %bp2p1 = getelementptr inbounds i8* %bp2, i64 1
  %dp2 = bitcast i8* %bp2p1 to i32*
  %tmp2 = load i32* %dp2, align 1

  %sexttmp1 = sext i32 %tmp1 to i64
  %sexttmp2 = sext i32 %tmp2 to i64
  %tmp3 = add i64 %sexttmp1, %sexttmp2
  ret i64 %tmp3
}