; RUN: llc < %s -mtriple=i686-linux -segmented-stacks -verify-machineinstrs | FileCheck %s -check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-linux -segmented-stacks -verify-machineinstrs | FileCheck %s -check-prefix=X64

; Just to prevent the alloca from being optimized away
declare void @dummy_use(i32*, i32)

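; test_basic makes a variable-sized alloca and then recurses, so it exercises
; both the segmented-stack function prologue and the dynamic-allocation path
; checked below.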
define i32 @test_basic(i32 %l) {
        %mem = alloca i32, i32 %l
        call void @dummy_use (i32* %mem, i32 %l)
        %terminate = icmp eq i32 %l, 0
        br i1 %terminate, label %true, label %false

true:
        ret i32 0

false:
        %newlen = sub i32 %l, 1
        %retvalue = call i32 @test_basic(i32 %newlen)
        ret i32 %retvalue

; X32: test_basic:

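; The prologue is expected to compute the post-allocation stack pointer,
; compare it against the stack limit kept in the thread control block
; (%gs:48 on x86-32 Linux), and on the slow path call __morestack with the
; frame size and incoming-argument size before returning to the caller.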
; X32: leal -12(%esp), %ecx
; X32-NEXT: cmpl %gs:48, %ecx

; X32: pushl $4
; X32-NEXT: pushl $12
; X32-NEXT: calll __morestack
; X32-NEXT: ret

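; The variable-sized alloca should likewise be range-checked: the requested
; size is subtracted from %esp and compared against the limit at %gs:48. If it
; fits, %esp is simply moved down; otherwise the block is obtained by calling
; __morestack_allocate_stack_space.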
; X32: movl %esp, %eax
; X32-NEXT: subl %ecx, %eax
; X32-NEXT: cmpl %eax, %gs:48

; X32: movl %eax, %esp

; X32: subl $12, %esp
; X32-NEXT: pushl %ecx
; X32-NEXT: calll __morestack_allocate_stack_space
; X32-NEXT: addl $16, %esp

; X64: test_basic:

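; On x86-64 the stack limit lives at %fs:112, and __morestack takes its
; operands in registers: the frame size (24 here) in %r10 and the
; incoming-argument size (0) in %r11.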
; X64: leaq -24(%rsp), %r11
; X64-NEXT: cmpq %fs:112, %r11

; X64: movabsq $24, %r10
; X64-NEXT: movabsq $0, %r11
; X64-NEXT: callq __morestack
; X64-NEXT: ret

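; The x86-64 dynamic alloca path mirrors the 32-bit one: the prospective stack
; pointer is compared against %fs:112, and when the request does not fit, the
; size is passed to __morestack_allocate_stack_space in %rdi and the returned
; pointer comes back in %rax.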
; X64: movq %rsp, %rdi
; X64-NEXT: subq %rax, %rdi
; X64-NEXT: cmpq %rdi, %fs:112

; X64: movq %rdi, %rsp

; X64: movq %rax, %rdi
; X64-NEXT: callq __morestack_allocate_stack_space
; X64-NEXT: movq %rax, %rdi

}

define i32 @test_nested(i32 * nest %closure, i32 %other) {
        %addend = load i32 * %closure
        %result = add i32 %other, %addend
        ret i32 %result

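; The nest parameter occupies a register the segmented-stack prologue would
; otherwise scratch, so the checks below make sure it survives: on x86-32 the
; limit comparison uses %edx instead of %ecx, and __morestack is called with a
; frame size of 0 since this leaf function needs no local stack.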
; X32: leal (%esp), %edx
; X32-NEXT: cmpl %gs:48, %edx


; X32: pushl $4
; X32-NEXT: pushl $0
; X32-NEXT: calll __morestack
; X32-NEXT: ret

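; On x86-64 the nest pointer arrives in %r10, which is also where __morestack
; expects the frame size, so the prologue is expected to stash it in %rax
; around the call and move it back afterwards.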
; X64: leaq (%rsp), %r11
; X64-NEXT: cmpq %fs:112, %r11

; X64: movq %r10, %rax
; X64-NEXT: movabsq $0, %r10
; X64-NEXT: movabsq $0, %r11
; X64-NEXT: callq __morestack
; X64-NEXT: ret
; X64-NEXT: movq %rax, %r10

}