; RUN: llc < %s -mtriple=i686-linux -segmented-stacks -verify-machineinstrs | FileCheck %s -check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-linux -segmented-stacks -verify-machineinstrs | FileCheck %s -check-prefix=X64
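
; The prologues checked below compare the stack pointer against the thread's
; stack limit in TLS (%gs:48 on i686-linux, %fs:112 on x86_64-linux) and call
; __morestack when a new stack segment is needed; variable-sized allocas are
; expected to go through __morestack_allocate_stack_space.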

; Just to prevent the alloca from being optimized away
declare void @dummy_use(i32*, i32)

define i32 @test_basic(i32 %l) {
  %mem = alloca i32, i32 %l
  call void @dummy_use (i32* %mem, i32 %l)
  %terminate = icmp eq i32 %l, 0
  br i1 %terminate, label %true, label %false

true:
  ret i32 0

false:
  %newlen = sub i32 %l, 1
  %retvalue = call i32 @test_basic(i32 %newlen)
  ret i32 %retvalue

; X32: test_basic:

; X32: cmpl %gs:48, %esp

; X32: pushl $4
; X32-NEXT: pushl $12
; X32-NEXT: calll __morestack
; X32-NEXT: ret

; X32: movl %esp, %eax
; X32-NEXT: subl %ecx, %eax
; X32-NEXT: cmpl %eax, %gs:48

; X32: movl %eax, %esp

; X32: subl $12, %esp
; X32-NEXT: pushl %ecx
; X32-NEXT: calll __morestack_allocate_stack_space
; X32-NEXT: addl $16, %esp

; X64: test_basic:

; X64: cmpq %fs:112, %rsp

; X64: movabsq $24, %r10
; X64-NEXT: movabsq $0, %r11
; X64-NEXT: callq __morestack
; X64-NEXT: ret

; X64: movq %rsp, %rdi
; X64-NEXT: subq %rax, %rdi
; X64-NEXT: cmpq %rdi, %fs:112

; X64: movq %rdi, %rsp

; X64: movq %rax, %rdi
; X64-NEXT: callq __morestack_allocate_stack_space
; X64-NEXT: movq %rax, %rdi

}

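; test_nested: the checks below expect the 'nest' parameter (passed in %r10
; on x86-64) to be moved aside before __morestack is called and restored
; afterwards.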
define i32 @test_nested(i32* nest %closure, i32 %other) {
  %addend = load i32* %closure
  %result = add i32 %other, %addend
  ret i32 %result

; X32: cmpl %gs:48, %esp

; X32: pushl $4
; X32-NEXT: pushl $0
; X32-NEXT: calll __morestack
; X32-NEXT: ret

; X64: cmpq %fs:112, %rsp

; X64: movq %r10, %rax
; X64-NEXT: movabsq $0, %r10
; X64-NEXT: movabsq $0, %r11
; X64-NEXT: callq __morestack
; X64-NEXT: ret
; X64-NEXT: movq %rax, %r10

}

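; test_large: with a fixed-size frame this large, the checks below expect the
; prologue to compute the prospective stack pointer with a lea and compare
; that against the limit, rather than comparing %esp/%rsp directly.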
define void @test_large() {
  %mem = alloca i32, i32 10000
  call void @dummy_use (i32* %mem, i32 0)
  ret void

; X32: leal -40012(%esp), %ecx
; X32-NEXT: cmpl %gs:48, %ecx

; X32: pushl $0
; X32-NEXT: pushl $40012
; X32-NEXT: calll __morestack
; X32-NEXT: ret

; X64: leaq -40008(%rsp), %r11
; X64-NEXT: cmpq %fs:112, %r11

; X64: movabsq $40008, %r10
; X64-NEXT: movabsq $0, %r11
; X64-NEXT: callq __morestack
; X64-NEXT: ret

}