; RUN: llc < %s | FileCheck %s
target datalayout = "e-p:64:64:64-S128-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f16:16:16-f32:32:32-f64:64:64-f128:128:128-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64"
target triple = "x86_64-apple-darwin11.4.0"

declare i64 @testi()

; Baseline: a direct tail call whose result is returned unchanged must lower
; to a jmp to the callee (TAILCALL), not a call/ret pair.
define i64 @test_trivial() {
  %A = tail call i64 @testi()
  ret i64 %A
}
; CHECK: test_trivial:
; CHECK: jmp _testi ## TAILCALL


| 14 | |
; A no-op bitcast (i64 -> i64) between the call and the return must not
; block tail-call optimization.
define i64 @test_noop_bitcast() {
  %A = tail call i64 @testi()
  %B = bitcast i64 %A to i64
  ret i64 %B
}
; CHECK: test_noop_bitcast:
; CHECK: jmp _testi ## TAILCALL


; Tail call shouldn't be blocked by no-op inttoptr (i64 -> i8* is a no-op
; conversion on this 64-bit target).
define i8* @test_inttoptr() {
  %A = tail call i64 @testi()
  %B = inttoptr i64 %A to i8*
  ret i8* %B
}

; CHECK: test_inttoptr:
; CHECK: jmp _testi ## TAILCALL


declare <4 x float> @testv()

; A same-width vector bitcast (<4 x float> -> <4 x i32>) is also a no-op and
; must not block the tail call.
define <4 x i32> @test_vectorbitcast() {
  %A = tail call <4 x float> @testv()
  %B = bitcast <4 x float> %A to <4 x i32>
  ret <4 x i32> %B
}
; CHECK: test_vectorbitcast:
; CHECK: jmp _testv ## TAILCALL


declare { i64, i64 } @testp()

; Returning a two-element aggregate straight from the callee must still
; tail-call (the pair comes back in registers unchanged).
define {i64, i64} @test_pair_trivial() {
  %A = tail call { i64, i64} @testp()
  ret { i64, i64} %A
}
; CHECK: test_pair_trivial:
; CHECK: jmp _testp ## TAILCALL



; Extracting both fields and reassembling an identical aggregate is a no-op;
; it must not block the tail call.
define {i64, i64} @test_pair_trivial_extract() {
  %A = tail call { i64, i64} @testp()
  %x = extractvalue { i64, i64} %A, 0
  %y = extractvalue { i64, i64} %A, 1

  %b = insertvalue {i64, i64} undef, i64 %x, 0
  %c = insertvalue {i64, i64} %b, i64 %y, 1

  ret { i64, i64} %c
}

; CHECK: test_pair_trivial_extract:
; CHECK: jmp _testp ## TAILCALL

; Same as above, but with a no-op inttoptr applied to the first field before
; reassembly — still must tail-call.
define {i8*, i64} @test_pair_conv_extract() {
  %A = tail call { i64, i64} @testp()
  %x = extractvalue { i64, i64} %A, 0
  %y = extractvalue { i64, i64} %A, 1

  %x1 = inttoptr i64 %x to i8*

  %b = insertvalue {i8*, i64} undef, i8* %x1, 0
  %c = insertvalue {i8*, i64} %b, i64 %y, 1

  ret { i8*, i64} %c
}

; CHECK: test_pair_conv_extract:
; CHECK: jmp _testp ## TAILCALL



Chris Lattner | b135989 | 2012-06-01 18:19:46 +0000 | [diff] [blame] | 89 | ; PR13006 |
| 90 | define { i64, i64 } @crash(i8* %this) { |
| 91 | %c = tail call { i64, i64 } @testp() |
| 92 | %mrv7 = insertvalue { i64, i64 } %c, i64 undef, 1 |
| 93 | ret { i64, i64 } %mrv7 |
| 94 | } |
| 95 | |
Jakob Stoklund Olesen | 3cf3ffc | 2012-09-13 18:31:27 +0000 | [diff] [blame] | 96 | ; Check that we can fold an indexed load into a tail call instruction. |
| 97 | ; CHECK: fold_indexed_load |
| 98 | ; CHECK: leaq (%rsi,%rsi,4), %[[RAX:r..]] |
Jakob Stoklund Olesen | 32a56fa | 2012-09-13 19:47:45 +0000 | [diff] [blame] | 99 | ; CHECK: jmpq *16(%{{r..}},%[[RAX]],8) # TAILCALL |
Jakob Stoklund Olesen | 3cf3ffc | 2012-09-13 18:31:27 +0000 | [diff] [blame] | 100 | %struct.funcs = type { i32 (i8*, i32*, i32)*, i32 (i8*)*, i32 (i8*)*, i32 (i8*, i32)*, i32 } |
| 101 | @func_table = external global [0 x %struct.funcs] |
| 102 | define void @fold_indexed_load(i8* %mbstr, i64 %idxprom) nounwind uwtable ssp { |
| 103 | entry: |
| 104 | %dsplen = getelementptr inbounds [0 x %struct.funcs]* @func_table, i64 0, i64 %idxprom, i32 2 |
| 105 | %x1 = load i32 (i8*)** %dsplen, align 8 |
| 106 | %call = tail call i32 %x1(i8* %mbstr) nounwind |
| 107 | ret void |
| 108 | } |
Chris Lattner | 466076b | 2012-06-01 05:29:15 +0000 | [diff] [blame] | 109 | |
Jakob Stoklund Olesen | 3cf3ffc | 2012-09-13 18:31:27 +0000 | [diff] [blame] | 110 | ; <rdar://problem/12282281> Fold an indexed load into the tail call instruction. |
| 111 | ; Calling a varargs function with 6 arguments requires 7 registers (%al is the |
| 112 | ; vector count for varargs functions). This leaves %r11 as the only available |
| 113 | ; scratch register. |
| 114 | ; |
| 115 | ; It is not possible to fold an indexed load into TCRETURNmi64 in that case. |
| 116 | ; |
| 117 | ; typedef int (*funcptr)(void*, ...); |
| 118 | ; extern const funcptr funcs[]; |
| 119 | ; int f(int n) { |
| 120 | ; return funcs[n](0, 0, 0, 0, 0, 0); |
| 121 | ; } |
| 122 | ; |
| 123 | ; CHECK: rdar12282281 |
| 124 | ; CHECK: jmpq *%r11 # TAILCALL |
| 125 | @funcs = external constant [0 x i32 (i8*, ...)*] |
| 126 | |
| 127 | define i32 @rdar12282281(i32 %n) nounwind uwtable ssp { |
| 128 | entry: |
| 129 | %idxprom = sext i32 %n to i64 |
| 130 | %arrayidx = getelementptr inbounds [0 x i32 (i8*, ...)*]* @funcs, i64 0, i64 %idxprom |
| 131 | %0 = load i32 (i8*, ...)** %arrayidx, align 8 |
| 132 | %call = tail call i32 (i8*, ...)* %0(i8* null, i32 0, i32 0, i32 0, i32 0, i32 0) nounwind |
| 133 | ret i32 %call |
| 134 | } |