; RUN: llc < %s -mtriple=i386-pc-linux -mcpu=corei7 -relocation-model=static | FileCheck --check-prefix=X86 %s
; RUN: llc < %s -mtriple=i386-pc-linux -mcpu=corei7 -relocation-model=pic | FileCheck --check-prefix=PIC86 %s
; RUN: llc < %s -mtriple=x86_64-pc-linux -mcpu=corei7 -relocation-model=static | FileCheck --check-prefix=X64 %s
; RUN: llc < %s -mtriple=x86_64-pc-linux -mcpu=corei7 -relocation-model=pic | FileCheck --check-prefix=PIC64 %s
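; Test lowering of the @llvm.eh.sjlj.setjmp and @llvm.eh.sjlj.longjmp
; intrinsics on x86 and x86-64, in both the static and PIC relocation models.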

@buf = internal global [5 x i8*] zeroinitializer

declare i8* @llvm.frameaddress(i32) nounwind readnone

declare i8* @llvm.stacksave() nounwind

declare i32 @llvm.eh.sjlj.setjmp(i8*) nounwind

declare void @llvm.eh.sjlj.longjmp(i8*) nounwind

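; @sj0 saves the frame pointer in slot 0 and the stack pointer in slot 2 of
; @buf; the setjmp lowering fills slot 1 with the resume address, giving the
; byte offsets 0/4/8 checked below on i386 and 0/8/16 on x86-64.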
define i32 @sj0() nounwind {
  %fp = tail call i8* @llvm.frameaddress(i32 0)
  store i8* %fp, i8** getelementptr inbounds ([5 x i8*]* @buf, i64 0, i64 0), align 16
  %sp = tail call i8* @llvm.stacksave()
  store i8* %sp, i8** getelementptr inbounds ([5 x i8*]* @buf, i64 0, i64 2), align 16
  %r = tail call i32 @llvm.eh.sjlj.setjmp(i8* bitcast ([5 x i8*]* @buf to i8*))
  ret i32 %r
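; Static code stores the resume label directly as an immediate, while PIC code
; materializes its address with lea before storing it.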
; X86: sj0
; X86: movl %ebp, buf
; X86: movl %esp, buf+8
; X86: movl ${{.*LBB.*}}, buf+4
; X86: ret
; PIC86: sj0
; PIC86: movl %ebp, buf@GOTOFF(%[[GOT:.*]])
; PIC86: movl %esp, buf@GOTOFF+8(%[[GOT]])
; PIC86: leal {{.*LBB.*}}@GOTOFF(%[[GOT]]), %[[LREG:.*]]
; PIC86: movl %[[LREG]], buf@GOTOFF+4
; PIC86: ret
; X64: sj0
; X64: movq %rbp, buf(%rip)
; X64: movq %rsp, buf+16(%rip)
; X64: movq ${{.*LBB.*}}, buf+8(%rip)
; X64: ret
; PIC64: sj0
; PIC64: movq %rbp, buf(%rip)
; PIC64: movq %rsp, buf+16(%rip)
; PIC64: leaq {{.*LBB.*}}(%rip), %[[LREG:.*]]
; PIC64: movq %[[LREG]], buf+8(%rip)
; PIC64: ret
}

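; @lj0 reloads the frame pointer, resume address, and stack pointer from @buf
; and jumps through the saved resume address.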
define void @lj0() nounwind {
  tail call void @llvm.eh.sjlj.longjmp(i8* bitcast ([5 x i8*]* @buf to i8*))
  unreachable
; X86: lj0
; X86: movl buf, %ebp
; X86: movl buf+4, %[[REG32:.*]]
; X86: movl buf+8, %esp
; X86: jmpl *%[[REG32]]
; X64: lj0
; X64: movq buf(%rip), %rbp
; X64: movq buf+8(%rip), %[[REG64:.*]]
; X64: movq buf+16(%rip), %rsp
; X64: jmpq *%[[REG64]]
}