; RUN: opt -S -early-cse < %s | FileCheck %s
; RUN: opt -S -gvn < %s | FileCheck %s
; RUN: opt -S -newgvn < %s | FileCheck %s
; RUN: opt -S -O3 < %s | FileCheck %s

; These tests check whether passes with CSE functionality can CSE calls to
; launder.invariant.group. Such CSE is prohibited when there is a memory
; clobber between the barrier calls.

; CHECK-LABEL: define i8 @optimizable()
define i8 @optimizable() {
entry:
  %ptr = alloca i8
  store i8 42, i8* %ptr, !invariant.group !0
; CHECK: call i8* @llvm.launder.invariant.group.p0i8
  %ptr2 = call i8* @llvm.launder.invariant.group.p0i8(i8* %ptr)
; FIXME: This one could be CSE'd; no clobber separates it from the launder above.
; CHECK: call i8* @llvm.launder.invariant.group
  %ptr3 = call i8* @llvm.launder.invariant.group.p0i8(i8* %ptr)
; CHECK: call void @clobber(i8* {{.*}}%ptr)
  call void @clobber(i8* %ptr)

; CHECK: call void @use(i8* {{.*}}%ptr2)
  call void @use(i8* %ptr2)
; CHECK: call void @use(i8* {{.*}}%ptr3)
  call void @use(i8* %ptr3)
; CHECK: load i8, i8* %ptr3, {{.*}}!invariant.group
  %v = load i8, i8* %ptr3, !invariant.group !0

  ret i8 %v
}

; CHECK-LABEL: define i8 @unoptimizable()
define i8 @unoptimizable() {
entry:
  %ptr = alloca i8
  store i8 42, i8* %ptr, !invariant.group !0
; CHECK: call i8* @llvm.launder.invariant.group.p0i8
  %ptr2 = call i8* @llvm.launder.invariant.group.p0i8(i8* %ptr)
  call void @clobber(i8* %ptr)
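; @clobber may write through %ptr, so the launder below must not be CSE'd with %ptr2.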
; CHECK: call i8* @llvm.launder.invariant.group.p0i8
  %ptr3 = call i8* @llvm.launder.invariant.group.p0i8(i8* %ptr)
; CHECK: call void @clobber(i8* {{.*}}%ptr)
  call void @clobber(i8* %ptr)
; CHECK: call void @use(i8* {{.*}}%ptr2)
  call void @use(i8* %ptr2)
; CHECK: call void @use(i8* {{.*}}%ptr3)
  call void @use(i8* %ptr3)
; CHECK: load i8, i8* %ptr3, {{.*}}!invariant.group
  %v = load i8, i8* %ptr3, !invariant.group !0

  ret i8 %v
}

; CHECK-LABEL: define i8 @unoptimizable2()
define i8 @unoptimizable2() {
  %ptr = alloca i8
  store i8 42, i8* %ptr, !invariant.group !0
; CHECK: call i8* @llvm.launder.invariant.group
  %ptr2 = call i8* @llvm.launder.invariant.group.p0i8(i8* %ptr)
  store i8 43, i8* %ptr
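; The store of 43 above carries no !invariant.group metadata and clobbers %ptr,
; so the launder below must not be CSE'd with %ptr2.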
; CHECK: call i8* @llvm.launder.invariant.group
  %ptr3 = call i8* @llvm.launder.invariant.group.p0i8(i8* %ptr)
; CHECK: call void @clobber(i8* {{.*}}%ptr)
  call void @clobber(i8* %ptr)
; CHECK: call void @use(i8* {{.*}}%ptr2)
  call void @use(i8* %ptr2)
; CHECK: call void @use(i8* {{.*}}%ptr3)
  call void @use(i8* %ptr3)
; CHECK: load i8, i8* %ptr3, {{.*}}!invariant.group
  %v = load i8, i8* %ptr3, !invariant.group !0
  ret i8 %v
}

; This test checks that the optimizer does not prove pointer equality based on mustalias.
; CHECK-LABEL: define void @dontProveEquality(i8* %a)
define void @dontProveEquality(i8* %a) {
  %b = call i8* @llvm.launder.invariant.group.p0i8(i8* %a)
  %r = icmp eq i8* %b, %a
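; Folding these comparisons to true based on the must-alias relationship would let
; %a be substituted for the laundered/stripped pointer and defeat the barriers.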
; CHECK: call void @useBool(i1 %r)
  call void @useBool(i1 %r)

  %b2 = call i8* @llvm.strip.invariant.group.p0i8(i8* %a)
  %r2 = icmp eq i8* %b2, %a
; CHECK: call void @useBool(i1 %r2)
  call void @useBool(i1 %r2)

  ret void
}

declare void @use(i8* readonly)
declare void @useBool(i1)

declare void @clobber(i8*)
; CHECK: Function Attrs: inaccessiblememonly nounwind speculatable{{$}}
; CHECK-NEXT: declare i8* @llvm.launder.invariant.group.p0i8(i8*)
declare i8* @llvm.launder.invariant.group.p0i8(i8*)

; CHECK: Function Attrs: nounwind readnone speculatable{{$}}
; CHECK-NEXT: declare i8* @llvm.strip.invariant.group.p0i8(i8*)
declare i8* @llvm.strip.invariant.group.p0i8(i8*)

!0 = !{}