; RUN: opt -basicaa -print-memoryssa -verify-memoryssa -analyze < %s 2>&1 | FileCheck %s
; RUN: opt -aa-pipeline=basic-aa -passes='print<memoryssa>,verify<memoryssa>' -disable-output < %s 2>&1 | FileCheck %s
;
; Ensures that atomic loads count as MemoryDefs

; An acquire atomic load is modeled as a MemoryDef; a plain load after it
; becomes a use of that def rather than of the earlier store.
; CHECK-LABEL: define i32 @foo
define i32 @foo(i32* %a, i32* %b) {
; CHECK: 1 = MemoryDef(liveOnEntry)
; CHECK-NEXT: store i32 4
  store i32 4, i32* %a, align 4
; CHECK: 2 = MemoryDef(1)
; CHECK-NEXT: %1 = load atomic i32
  %1 = load atomic i32, i32* %b acquire, align 4
; CHECK: MemoryUse(2)
; CHECK-NEXT: %2 = load i32
  %2 = load i32, i32* %a, align 4
  %3 = add i32 %1, %2
  ret i32 %3
}

; Atomic loads ordered unordered are plain MemoryUses; monotonic and
; stronger orderings are conservatively modeled as MemoryDefs.
; CHECK-LABEL: define void @bar
define void @bar(i32* %a) {
; CHECK: MemoryUse(liveOnEntry)
; CHECK-NEXT: load atomic i32, i32* %a unordered, align 4
  load atomic i32, i32* %a unordered, align 4
; CHECK: 1 = MemoryDef(liveOnEntry)
; CHECK-NEXT: load atomic i32, i32* %a monotonic, align 4
  load atomic i32, i32* %a monotonic, align 4
; CHECK: 2 = MemoryDef(1)
; CHECK-NEXT: load atomic i32, i32* %a acquire, align 4
  load atomic i32, i32* %a acquire, align 4
; CHECK: 3 = MemoryDef(2)
; CHECK-NEXT: load atomic i32, i32* %a seq_cst, align 4
  load atomic i32, i32* %a seq_cst, align 4
  ret void
}

; An unordered atomic load after an acquire load is a use of the acquire's
; def; a monotonic load is itself another def.
; CHECK-LABEL: define void @baz
define void @baz(i32* %a) {
; CHECK: 1 = MemoryDef(liveOnEntry)
; CHECK-NEXT: %1 = load atomic i32
  %1 = load atomic i32, i32* %a acquire, align 4
; CHECK: MemoryUse(1)
; CHECK-NEXT: %2 = load atomic i32, i32* %a unordered, align 4
  %2 = load atomic i32, i32* %a unordered, align 4
; CHECK: 2 = MemoryDef(1)
; CHECK-NEXT: %3 = load atomic i32, i32* %a monotonic, align 4
  %3 = load atomic i32, i32* %a monotonic, align 4
  ret void
}

; Every fence, regardless of ordering, is modeled as a MemoryDef, and each
; following load is a use of the most recent fence's def.
; CHECK-LABEL: define void @fences
define void @fences(i32* %a) {
; CHECK: 1 = MemoryDef(liveOnEntry)
; CHECK-NEXT: fence acquire
  fence acquire
; CHECK: MemoryUse(1)
; CHECK-NEXT: %1 = load i32, i32* %a
  %1 = load i32, i32* %a

; CHECK: 2 = MemoryDef(1)
; CHECK-NEXT: fence release
  fence release
; CHECK: MemoryUse(2)
; CHECK-NEXT: %2 = load i32, i32* %a
  %2 = load i32, i32* %a

; CHECK: 3 = MemoryDef(2)
; CHECK-NEXT: fence acq_rel
  fence acq_rel
; CHECK: MemoryUse(3)
; CHECK-NEXT: %3 = load i32, i32* %a
  %3 = load i32, i32* %a

; CHECK: 4 = MemoryDef(3)
; CHECK-NEXT: fence seq_cst
  fence seq_cst
; CHECK: MemoryUse(4)
; CHECK-NEXT: %4 = load i32, i32* %a
  %4 = load i32, i32* %a
  ret void
}

; A seq_cst load acts as a clobber between two monotonic loads of the same
; location: the second monotonic load's def is chained through the seq_cst one.
; (The unnamed loads print with implicit value names %1/%2 in the checked
; output.)
; CHECK-LABEL: define void @seq_cst_clobber
define void @seq_cst_clobber(i32* noalias %a, i32* noalias %b) {
; CHECK: 1 = MemoryDef(liveOnEntry)
; CHECK-NEXT: %1 = load atomic i32, i32* %a monotonic, align 4
  load atomic i32, i32* %a monotonic, align 4

; CHECK: 2 = MemoryDef(1)
; CHECK-NEXT: %2 = load atomic i32, i32* %a seq_cst, align 4
  load atomic i32, i32* %a seq_cst, align 4

; CHECK: 3 = MemoryDef(2)
; CHECK-NEXT: load atomic i32, i32* %a monotonic, align 4
  load atomic i32, i32* %a monotonic, align 4

  ret void
}

; Ensure that AA hands us MRI_Mod on unreorderable atomic ops.
;
; This test is a bit implementation-specific. In particular, it depends on that
; we pass cmpxchg-load queries to AA, without trying to reason about them on
; our own.
;
; If AA gets more aggressive, we can find another way.
;
; CHECK-LABEL: define void @check_aa_is_sane
define void @check_aa_is_sane(i32* noalias %a, i32* noalias %b) {
; CHECK: 1 = MemoryDef(liveOnEntry)
; CHECK-NEXT: cmpxchg i32* %a, i32 0, i32 1 acquire acquire
  cmpxchg i32* %a, i32 0, i32 1 acquire acquire
; CHECK: MemoryUse(1)
; CHECK-NEXT: load i32, i32* %b, align 4
  load i32, i32* %b, align 4

  ret void
}