; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"

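; Note: the final i32 argument of each __tsan_atomic* call checked below encodes
; the memory ordering of the original instruction. Judging by the CHECK lines,
; the numbering appears to mirror the C++11 memory_order values:
;   unordered/monotonic -> 0, acquire -> 2, release -> 3, acq_rel -> 4, seq_cst -> 5.
; For cmpxchg, a second ordering argument is emitted for the failure case
; (e.g. release becomes "i32 3, i32 0").
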
define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5)

define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0)

define void @atomic8_nand_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_nand_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 0)

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2)

define void @atomic8_nand_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_nand_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 2)

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3)

define void @atomic8_nand_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_nand_release
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 3)

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4)

define void @atomic8_nand_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_nand_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 4)

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5)

define void @atomic8_nand_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_nand_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 5)

define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0, i32 0)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2, i32 2)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4, i32 2)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5, i32 5)

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5)

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0)

define void @atomic16_nand_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_nand_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 0)

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2)

define void @atomic16_nand_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_nand_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 2)

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3)

define void @atomic16_nand_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_nand_release
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 3)

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4)

define void @atomic16_nand_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_nand_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 4)

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5)

define void @atomic16_nand_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_nand_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 5)

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0, i32 0)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2, i32 2)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3, i32 0)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4, i32 2)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5, i32 5)

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
|  | 774 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 775 | %0 = load atomic i32* %a unordered, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 776 | ret i32 %0 | 
|  | 777 | } | 
|  | 778 | ; CHECK: atomic32_load_unordered | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 779 | ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 780 |  | 
|  | 781 | define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable { | 
|  | 782 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 783 | %0 = load atomic i32* %a monotonic, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 784 | ret i32 %0 | 
|  | 785 | } | 
|  | 786 | ; CHECK: atomic32_load_monotonic | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 787 | ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 788 |  | 
|  | 789 | define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable { | 
|  | 790 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 791 | %0 = load atomic i32* %a acquire, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 792 | ret i32 %0 | 
|  | 793 | } | 
|  | 794 | ; CHECK: atomic32_load_acquire | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 795 | ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 796 |  | 
|  | 797 | define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable { | 
|  | 798 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 799 | %0 = load atomic i32* %a seq_cst, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 800 | ret i32 %0 | 
|  | 801 | } | 
|  | 802 | ; CHECK: atomic32_load_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 803 | ; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 804 |  | 
|  | 805 | define void @atomic32_store_unordered(i32* %a) nounwind uwtable { | 
|  | 806 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 807 | store atomic i32 0, i32* %a unordered, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 808 | ret void | 
|  | 809 | } | 
|  | 810 | ; CHECK: atomic32_store_unordered | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 811 | ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 812 |  | 
|  | 813 | define void @atomic32_store_monotonic(i32* %a) nounwind uwtable { | 
|  | 814 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 815 | store atomic i32 0, i32* %a monotonic, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 816 | ret void | 
|  | 817 | } | 
|  | 818 | ; CHECK: atomic32_store_monotonic | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 819 | ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 820 |  | 
|  | 821 | define void @atomic32_store_release(i32* %a) nounwind uwtable { | 
|  | 822 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 823 | store atomic i32 0, i32* %a release, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 824 | ret void | 
|  | 825 | } | 
|  | 826 | ; CHECK: atomic32_store_release | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 827 | ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 828 |  | 
|  | 829 | define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable { | 
|  | 830 | entry: | 
| Dmitry Vyukov | 84d75cd | 2012-10-03 13:19:20 +0000 | [diff] [blame] | 831 | store atomic i32 0, i32* %a seq_cst, align 4 | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 832 | ret void | 
|  | 833 | } | 
|  | 834 | ; CHECK: atomic32_store_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 835 | ; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5) | 
| Kostya Serebryany | a125977 | 2012-04-27 07:31:53 +0000 | [diff] [blame] | 836 |  | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 837 | define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable { | 
|  | 838 | entry: | 
|  | 839 | atomicrmw xchg i32* %a, i32 0 monotonic | 
|  | 840 | ret void | 
|  | 841 | } | 
|  | 842 | ; CHECK: atomic32_xchg_monotonic | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 843 | ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 844 |  | 
|  | 845 | define void @atomic32_add_monotonic(i32* %a) nounwind uwtable { | 
|  | 846 | entry: | 
|  | 847 | atomicrmw add i32* %a, i32 0 monotonic | 
|  | 848 | ret void | 
|  | 849 | } | 
|  | 850 | ; CHECK: atomic32_add_monotonic | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 851 | ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 852 |  | 
|  | 853 | define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable { | 
|  | 854 | entry: | 
|  | 855 | atomicrmw sub i32* %a, i32 0 monotonic | 
|  | 856 | ret void | 
|  | 857 | } | 
|  | 858 | ; CHECK: atomic32_sub_monotonic | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 859 | ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 860 |  | 
define void @atomic32_and_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_and_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0)

define void @atomic32_or_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_or_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0)

define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xor_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0)

define void @atomic32_nand_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_nand_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 0)

define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xchg_acquire
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2)

define void @atomic32_add_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_add_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2)

define void @atomic32_sub_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_sub_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2)

define void @atomic32_and_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_and_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2)

define void @atomic32_or_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_or_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2)

define void @atomic32_xor_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xor_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2)

define void @atomic32_nand_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_nand_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 2)

define void @atomic32_xchg_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xchg_release
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3)

define void @atomic32_add_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_add_release
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3)

define void @atomic32_sub_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_sub_release
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3)

define void @atomic32_and_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_and_release
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3)

define void @atomic32_or_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_or_release
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3)

define void @atomic32_xor_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xor_release
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3)

define void @atomic32_nand_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_nand_release
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 3)

define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xchg_acq_rel
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4)

define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_add_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4)

define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_sub_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4)

define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_and_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4)

define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_or_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4)

define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xor_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4)

define void @atomic32_nand_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_nand_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 4)

define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xchg_seq_cst
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5)

define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_add_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5)

define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_sub_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5)

define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_and_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5)

define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_or_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5)

define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xor_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5)

define void @atomic32_nand_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_nand_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 5)

define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 monotonic
  ret void
}
; CHECK: atomic32_cas_monotonic
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0, i32 0)

define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acquire
  ret void
}
; CHECK: atomic32_cas_acquire
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2, i32 2)

define void @atomic32_cas_release(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 release
  ret void
}
; CHECK: atomic32_cas_release
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3, i32 0)

define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acq_rel
  ret void
}
; CHECK: atomic32_cas_acq_rel
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4, i32 2)

define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 seq_cst
  ret void
}
; CHECK: atomic32_cas_seq_cst
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5, i32 5)

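; In the compare_exchange_val calls above, the two trailing i32 arguments appear
; to be the success and failure memory orders; the failure order drops any
; release component (release -> relaxed, acq_rel -> acquire, seq_cst -> seq_cst).
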
define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 8
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 8
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 8
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 8
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 8
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 8
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 8
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 8
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5)

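; Atomic stores are exercised only with unordered, monotonic, release, and
; seq_cst; acquire and acq_rel are not valid orderings for a store in LLVM IR,
; so no such variants appear here.
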
define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xchg_monotonic
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0)

define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_add_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0)

define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_sub_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0)

define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_and_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0)

define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_or_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0)

define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xor_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0)

define void @atomic64_nand_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_nand_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 0)

define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xchg_acquire
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2)

define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_add_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2)

define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_sub_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2)

define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_and_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2)

define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_or_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2)

define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xor_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2)

define void @atomic64_nand_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_nand_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 2)

define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xchg_release
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3)

define void @atomic64_add_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_add_release
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3)

define void @atomic64_sub_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_sub_release
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3)

define void @atomic64_and_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_and_release
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3)

define void @atomic64_or_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_or_release
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3)

define void @atomic64_xor_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xor_release
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3)

define void @atomic64_nand_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_nand_release
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 3)

define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xchg_acq_rel
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4)

define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_add_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4)

define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_sub_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4)

define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_and_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4)

define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_or_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4)

define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xor_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4)

define void @atomic64_nand_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_nand_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 4)

define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xchg_seq_cst
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5)

define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_add_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5)

define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_sub_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5)

define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_and_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5)

define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_or_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5)

define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xor_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5)

define void @atomic64_nand_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_nand_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 5)

define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 monotonic
  ret void
}
; CHECK: atomic64_cas_monotonic
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0, i32 0)

define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acquire
  ret void
}
; CHECK: atomic64_cas_acquire
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2, i32 2)

define void @atomic64_cas_release(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 release
  ret void
}
; CHECK: atomic64_cas_release
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3, i32 0)

define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acq_rel
  ret void
}
; CHECK: atomic64_cas_acq_rel
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4, i32 2)

define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 seq_cst
  ret void
}
; CHECK: atomic64_cas_seq_cst
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5, i32 5)

define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a unordered, align 16
  ret i128 %0
}
; CHECK: atomic128_load_unordered
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)

define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a monotonic, align 16
  ret i128 %0
}
; CHECK: atomic128_load_monotonic
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)

define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a acquire, align 16
  ret i128 %0
}
; CHECK: atomic128_load_acquire
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2)

define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a seq_cst, align 16
  ret i128 %0
}
; CHECK: atomic128_load_seq_cst
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5)

define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a unordered, align 16
  ret void
}
; CHECK: atomic128_store_unordered
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)

define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a monotonic, align 16
  ret void
}
; CHECK: atomic128_store_monotonic
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)

define void @atomic128_store_release(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a release, align 16
  ret void
}
; CHECK: atomic128_store_release
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3)

define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a seq_cst, align 16
  ret void
}
; CHECK: atomic128_store_seq_cst
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5)

define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xchg_monotonic
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0)

define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_add_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0)

define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_sub_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0)

define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_and_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0)

define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_or_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0)

define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xor_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0)

define void @atomic128_nand_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_nand_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 0)

define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xchg_acquire
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2)

define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_add_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2)

define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_sub_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2)

define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_and_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2)

define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_or_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2)

define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xor_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2)

define void @atomic128_nand_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_nand_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 2)

define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xchg_release
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3)

define void @atomic128_add_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_add_release
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3)

define void @atomic128_sub_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_sub_release
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3)

define void @atomic128_and_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_and_release
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3)

define void @atomic128_or_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_or_release
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3)

define void @atomic128_xor_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xor_release
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3)

define void @atomic128_nand_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_nand_release
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 3)

define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xchg_acq_rel
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4)

define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_add_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4)

define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_sub_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4)

define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_and_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4)

define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_or_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4)

define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xor_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4)

define void @atomic128_nand_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_nand_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 4)

| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1829 | define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1830 | entry: | 
|  | 1831 | atomicrmw xchg i128* %a, i128 0 seq_cst | 
|  | 1832 | ret void | 
|  | 1833 | } | 
|  | 1834 | ; CHECK: atomic128_xchg_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 1835 | ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1836 |  | 
|  | 1837 | define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1838 | entry: | 
|  | 1839 | atomicrmw add i128* %a, i128 0 seq_cst | 
|  | 1840 | ret void | 
|  | 1841 | } | 
|  | 1842 | ; CHECK: atomic128_add_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 1843 | ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1844 |  | 
|  | 1845 | define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1846 | entry: | 
|  | 1847 | atomicrmw sub i128* %a, i128 0 seq_cst | 
|  | 1848 | ret void | 
|  | 1849 | } | 
|  | 1850 | ; CHECK: atomic128_sub_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 1851 | ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1852 |  | 
|  | 1853 | define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1854 | entry: | 
|  | 1855 | atomicrmw and i128* %a, i128 0 seq_cst | 
|  | 1856 | ret void | 
|  | 1857 | } | 
|  | 1858 | ; CHECK: atomic128_and_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 1859 | ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1860 |  | 
|  | 1861 | define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1862 | entry: | 
|  | 1863 | atomicrmw or i128* %a, i128 0 seq_cst | 
|  | 1864 | ret void | 
|  | 1865 | } | 
|  | 1866 | ; CHECK: atomic128_or_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 1867 | ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1868 |  | 
|  | 1869 | define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1870 | entry: | 
|  | 1871 | atomicrmw xor i128* %a, i128 0 seq_cst | 
|  | 1872 | ret void | 
|  | 1873 | } | 
|  | 1874 | ; CHECK: atomic128_xor_seq_cst | 
| Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame] | 1875 | ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5) | 
| Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 1876 |  | 
| Dmitry Vyukov | a878e74 | 2012-11-27 08:09:25 +0000 | [diff] [blame] | 1877 | define void @atomic128_nand_seq_cst(i128* %a) nounwind uwtable { | 
|  | 1878 | entry: | 
|  | 1879 | atomicrmw nand i128* %a, i128 0 seq_cst | 
|  | 1880 | ret void | 
|  | 1881 | } | 
|  | 1882 | ; CHECK: atomic128_nand_seq_cst | 
|  | 1883 | ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 5) | 
|  | 1884 |  | 
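; The cmpxchg tests below check lowering to __tsan_atomic128_compare_exchange_val,
; whose last two i32 arguments are the success and failure orderings. As the
; CHECK lines show, the failure ordering is the success ordering with its
; release component dropped (release -> monotonic, acq_rel -> acquire), while
; monotonic, acquire, and seq_cst map to themselves.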
define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 monotonic
  ret void
}
; CHECK: atomic128_cas_monotonic
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0, i32 0)

define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acquire
  ret void
}
; CHECK: atomic128_cas_acquire
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2, i32 2)

define void @atomic128_cas_release(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 release
  ret void
}
; CHECK: atomic128_cas_release
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3, i32 0)

define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acq_rel
  ret void
}
; CHECK: atomic128_cas_acq_rel
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4, i32 2)

define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 seq_cst
  ret void
}
; CHECK: atomic128_cas_seq_cst
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5, i32 5)

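; The remaining tests cover fence instructions: a singlethread fence is
; instrumented as a call to __tsan_atomic_signal_fence and a regular fence as
; a call to __tsan_atomic_thread_fence, both taking the same i32 ordering
; encoding as the calls above.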
define void @atomic_signal_fence_acquire() nounwind uwtable {
entry:
  fence singlethread acquire
  ret void
}
; CHECK: atomic_signal_fence_acquire
; CHECK: call void @__tsan_atomic_signal_fence(i32 2)

define void @atomic_thread_fence_acquire() nounwind uwtable {
entry:
  fence acquire
  ret void
}
; CHECK: atomic_thread_fence_acquire
; CHECK: call void @__tsan_atomic_thread_fence(i32 2)

define void @atomic_signal_fence_release() nounwind uwtable {
entry:
  fence singlethread release
  ret void
}
; CHECK: atomic_signal_fence_release
; CHECK: call void @__tsan_atomic_signal_fence(i32 3)

define void @atomic_thread_fence_release() nounwind uwtable {
entry:
  fence release
  ret void
}
; CHECK: atomic_thread_fence_release
; CHECK: call void @__tsan_atomic_thread_fence(i32 3)

define void @atomic_signal_fence_acq_rel() nounwind uwtable {
entry:
  fence singlethread acq_rel
  ret void
}
; CHECK: atomic_signal_fence_acq_rel
; CHECK: call void @__tsan_atomic_signal_fence(i32 4)

define void @atomic_thread_fence_acq_rel() nounwind uwtable {
entry:
  fence acq_rel
  ret void
}
; CHECK: atomic_thread_fence_acq_rel
; CHECK: call void @__tsan_atomic_thread_fence(i32 4)

define void @atomic_signal_fence_seq_cst() nounwind uwtable {
entry:
  fence singlethread seq_cst
  ret void
}
; CHECK: atomic_signal_fence_seq_cst
; CHECK: call void @__tsan_atomic_signal_fence(i32 5)

define void @atomic_thread_fence_seq_cst() nounwind uwtable {
entry:
  fence seq_cst
  ret void
}
; CHECK: atomic_thread_fence_seq_cst
; CHECK: call void @__tsan_atomic_thread_fence(i32 5)