; RUN: opt -S -globalopt < %s | FileCheck %s

; This test hints at what GlobalOpt can optimize here and what it can't.
; FIXME: @tmp and @tmp2 can be safely set to 42
; CHECK: @tmp = local_unnamed_addr global i32 0
; CHECK: @tmp2 = local_unnamed_addr global i32 0
; CHECK: @tmp3 = global i32 0
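; If the FIXME above were resolved, the checks for @tmp and @tmp2 could instead
; expect the folded initializers, e.g. (a sketch of the hoped-for output, not
; what -globalopt currently produces):
;   @tmp = local_unnamed_addr global i32 42
;   @tmp2 = local_unnamed_addr global i32 42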

@tmp = global i32 0
@tmp2 = global i32 0
@tmp3 = global i32 0
@ptrToTmp3 = global i32* null

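; @_GLOBAL__I_a is registered as a static constructor below, so GlobalOpt's
; constructor evaluator attempts to execute it at compile time and fold the
; stores it performs into the globals' initializers.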
@llvm.global_ctors = appending global [1 x { i32, void ()* }] [{ i32, void ()* } { i32 65535, void ()* @_GLOBAL__I_a }]

define i32 @TheAnswerToLifeTheUniverseAndEverything() {
  ret i32 42
}

define void @_GLOBAL__I_a() {
enter:
  call void @_optimizable()
  call void @_not_optimizable()
  ret void
}

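; In @_optimizable the laundered pointer only ever refers to the local %valptr
; alloca, so looking through the launder cannot affect any other
; !invariant.group load; this is why the FIXME above says @tmp and @tmp2 could
; be folded to 42.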
define void @_optimizable() {
enter:
  %valptr = alloca i32

  %val = call i32 @TheAnswerToLifeTheUniverseAndEverything()
  store i32 %val, i32* @tmp
  store i32 %val, i32* %valptr

  %0 = bitcast i32* %valptr to i8*
  %barr = call i8* @llvm.launder.invariant.group(i8* %0)
  %1 = bitcast i8* %barr to i32*

  %val2 = load i32, i32* %1
  store i32 %val2, i32* @tmp2
  ret void
}

; We can't step through launder.invariant.group here, because that would change
; this load in @usage_of_globals()
;   %val = load i32, i32* %ptrVal, !invariant.group !0
; into
;   %val = load i32, i32* @tmp3, !invariant.group !0
; and then we could assume that %val and %val2 are the same, which could be
; false, because @changeTmp3ValAndCallBarrierInside() may change the value
; of @tmp3.
define void @_not_optimizable() {
enter:
  store i32 13, i32* @tmp3, !invariant.group !0

  %0 = bitcast i32* @tmp3 to i8*
  %barr = call i8* @llvm.launder.invariant.group(i8* %0)
  %1 = bitcast i8* %barr to i32*

  store i32* %1, i32** @ptrToTmp3
  store i32 42, i32* %1, !invariant.group !0

  ret void
}
define void @usage_of_globals() {
entry:
  %ptrVal = load i32*, i32** @ptrToTmp3
  %val = load i32, i32* %ptrVal, !invariant.group !0

  call void @changeTmp3ValAndCallBarrierInside()
  %val2 = load i32, i32* @tmp3, !invariant.group !0
  ret void
}

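; @changeTmp3ValAndCallBarrierInside() is only declared, so the optimizer must
; assume it may store a new value to @tmp3 (and launder the pointer again),
; which is why the two loads in @usage_of_globals() cannot be assumed equal.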
declare void @changeTmp3ValAndCallBarrierInside()

declare i8* @llvm.launder.invariant.group(i8*)

!0 = !{}