; RUN: opt -S -globalopt < %s | FileCheck %s

; This test hints at what GlobalOpt can optimize and what it can't.
; FIXME: @tmp and @tmp2 can be safely set to 42
; CHECK: @tmp = local_unnamed_addr global i32 0
; CHECK: @tmp2 = local_unnamed_addr global i32 0
; CHECK: @tmp3 = global i32 0

@tmp = global i32 0
@tmp2 = global i32 0
@tmp3 = global i32 0
@ptrToTmp3 = global i32* null

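; @_GLOBAL__I_a below is registered as a static constructor; it exercises both
; the optimizable and the non-optimizable pattern.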
@llvm.global_ctors = appending global [1 x { i32, void ()* }] [{ i32, void ()* } { i32 65535, void ()* @_GLOBAL__I_a }]

define i32 @TheAnswerToLifeTheUniverseAndEverything() {
  ret i32 42
}

define void @_GLOBAL__I_a() {
enter:
  call void @_optimizable()
  call void @_not_optimizable()
  ret void
}

define void @_optimizable() {
enter:
  %valptr = alloca i32

  %val = call i32 @TheAnswerToLifeTheUniverseAndEverything()
  store i32 %val, i32* @tmp
  store i32 %val, i32* %valptr

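  ; %valptr is a function-local alloca, so the load back through the laundered
  ; pointer below still sees %val; this is why the FIXME above says @tmp and
  ; @tmp2 could be set to 42.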
  %0 = bitcast i32* %valptr to i8*
  %barr = call i8* @llvm.launder.invariant.group(i8* %0)
  %1 = bitcast i8* %barr to i32*

  %val2 = load i32, i32* %1
  store i32 %val2, i32* @tmp2
  ret void
}

; We can't step through launder.invariant.group here, because that would change
; this load in @usage_of_globals()
; %val = load i32, i32* %ptrVal, !invariant.group !0
; into
; %val = load i32, i32* @tmp3, !invariant.group !0
; and then we could assume that %val and %val2 are the same, which could be
; false, because @changeTmp3ValAndCallBarrierInside() may change the value
; of @tmp3.
define void @_not_optimizable() {
enter:
  store i32 13, i32* @tmp3, !invariant.group !0

  %0 = bitcast i32* @tmp3 to i8*
  %barr = call i8* @llvm.launder.invariant.group(i8* %0)
  %1 = bitcast i8* %barr to i32*

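  ; The laundered pointer escapes through @ptrToTmp3, so the store of 42 below
  ; may be observed by the !invariant.group load in @usage_of_globals().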
  store i32* %1, i32** @ptrToTmp3
  store i32 42, i32* %1, !invariant.group !0

  ret void
}
define void @usage_of_globals() {
entry:
  %ptrVal = load i32*, i32** @ptrToTmp3
  %val = load i32, i32* %ptrVal, !invariant.group !0

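  ; If GlobalOpt had stepped through the launder in @_not_optimizable(), %val
  ; above and %val2 below would wrongly be assumed equal.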
  call void @changeTmp3ValAndCallBarrierInside()
  %val2 = load i32, i32* @tmp3, !invariant.group !0
  ret void
}

declare void @changeTmp3ValAndCallBarrierInside()

declare i8* @llvm.launder.invariant.group(i8*)

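; !invariant.group metadata must be an empty node.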
!0 = !{}