; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=corei7 -disable-fp-elim | FileCheck %s

; Trivial patchpoint codegen
;
define i64 @trivial_patchpoint_codegen(i64 %p1, i64 %p2, i64 %p3, i64 %p4) {
entry:
; CHECK-LABEL: trivial_patchpoint_codegen:
; CHECK: movabsq $-559038736, %r11
; CHECK-NEXT: callq *%r11
; CHECK-NEXT: xchgw %ax, %ax
; CHECK: movq %rax, %[[REG:r.+]]
; CHECK: movabsq $-559038737, %r11
; CHECK: callq *%r11
; CHECK-NEXT: xchgw %ax, %ax
; CHECK: movq %[[REG]], %rax
; CHECK: ret
; Each patchpoint lowers to a movabsq of the dummy target address into
; %r11 followed by an indirect call; the trailing xchgw %ax, %ax is nop
; padding (presumably filling the declared 15-byte patch region — the
; i32 15 num-bytes operand below).
  %resolveCall2 = inttoptr i64 -559038736 to i8*
  %result = tail call i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 2, i32 15, i8* %resolveCall2, i32 4, i64 %p1, i64 %p2, i64 %p3, i64 %p4)
  %resolveCall3 = inttoptr i64 -559038737 to i8*
  tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 3, i32 15, i8* %resolveCall3, i32 2, i64 %p1, i64 %result)
  ret i64 %result
}
; Caller frame metadata with stackmaps. This should not be optimized
; as a leaf function.
;
; The stackmap call references the stack allocation, so the frame
; (subq/addq $32) must actually be emitted rather than elided.
; CHECK-LABEL: caller_meta_leaf
; CHECK: subq $32, %rsp
; CHECK: Ltmp
; CHECK: addq $32, %rsp
; CHECK: ret
define void @caller_meta_leaf() {
entry:
  %metadata = alloca i64, i32 3, align 8
  store i64 11, i64* %metadata
  store i64 12, i64* %metadata
  store i64 13, i64* %metadata
  call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 4, i32 0, i64* %metadata)
  ret void
}
; Test the webkit_jscc calling convention.
; One argument will be passed in register, the other will be pushed on the stack.
; Return value in $rax.
define void @jscall_patchpoint_codegen(i64 %p1, i64 %p2, i64 %p3, i64 %p4) {
entry:
; CHECK-LABEL: jscall_patchpoint_codegen:
; CHECK: Ltmp
; CHECK: movq %r{{.+}}, (%rsp)
; CHECK: movq %r{{.+}}, %rax
; CHECK: Ltmp
; CHECK-NEXT: movabsq $-559038736, %r11
; CHECK-NEXT: callq *%r11
; CHECK: movq %rax, (%rsp)
; CHECK: callq
; The second patchpoint passes %result (returned in %rax by the first
; call) as a stack argument, hence the movq %rax, (%rsp) before it.
  %resolveCall2 = inttoptr i64 -559038736 to i8*
  %result = tail call webkit_jscc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 5, i32 15, i8* %resolveCall2, i32 2, i64 %p4, i64 %p2)
  %resolveCall3 = inttoptr i64 -559038737 to i8*
  tail call webkit_jscc void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 6, i32 15, i8* %resolveCall3, i32 2, i64 %p4, i64 %result)
  ret void
}
; Test if the arguments are properly aligned and that we don't store undef arguments.
; Only the defined args (2, 4, 6) are stored to their webkit_jscc stack
; slots; the i32 at offset 16 gets a movl, the i64s get movq.
define i64 @jscall_patchpoint_codegen2(i64 %callee) {
entry:
; CHECK-LABEL: jscall_patchpoint_codegen2:
; CHECK: Ltmp
; CHECK: movq $6, 24(%rsp)
; CHECK-NEXT: movl $4, 16(%rsp)
; CHECK-NEXT: movq $2, (%rsp)
; CHECK: Ltmp
; CHECK-NEXT: movabsq $-559038736, %r11
; CHECK-NEXT: callq *%r11
  %call = inttoptr i64 -559038736 to i8*
  %result = call webkit_jscc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 7, i32 15, i8* %call, i32 6, i64 %callee, i64 2, i64 undef, i32 4, i32 undef, i64 6)
  ret i64 %result
}
; Test if the arguments are properly aligned and that we don't store undef arguments.
; Same as above with a longer argument list (2, 4, 6, 8, 10); undef
; arguments still consume a slot but get no store.
define i64 @jscall_patchpoint_codegen3(i64 %callee) {
entry:
; CHECK-LABEL: jscall_patchpoint_codegen3:
; CHECK: Ltmp
; CHECK: movq $10, 48(%rsp)
; CHECK-NEXT: movl $8, 36(%rsp)
; CHECK-NEXT: movq $6, 24(%rsp)
; CHECK-NEXT: movl $4, 16(%rsp)
; CHECK-NEXT: movq $2, (%rsp)
; CHECK: Ltmp
; CHECK-NEXT: movabsq $-559038736, %r11
; CHECK-NEXT: callq *%r11
  %call = inttoptr i64 -559038736 to i8*
  %result = call webkit_jscc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 7, i32 15, i8* %call, i32 10, i64 %callee, i64 2, i64 undef, i32 4, i32 undef, i64 6, i32 undef, i32 8, i32 undef, i64 10)
  ret i64 %result
}
; Test patchpoints reusing the same TargetConstant.
; <rdar:15390785> Assertion failed: (CI.getNumArgOperands() >= NumArgs + 4)
; There is no way to verify this, since it depends on memory allocation.
; But I think it's useful to include as a working example.
define i64 @testLowerConstant(i64 %arg, i64 %tmp2, i64 %tmp10, i64* %tmp33, i64 %tmp79) {
entry:
  %tmp80 = add i64 %tmp79, -16
  %tmp81 = inttoptr i64 %tmp80 to i64*
  %tmp82 = load i64* %tmp81, align 8
  tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 14, i32 5, i64 %arg, i64 %tmp2, i64 %tmp10, i64 %tmp82)
  tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 15, i32 30, i8* null, i32 3, i64 %arg, i64 %tmp10, i64 %tmp82)
  %tmp83 = load i64* %tmp33, align 8
  %tmp84 = add i64 %tmp83, -24
  %tmp85 = inttoptr i64 %tmp84 to i64*
  %tmp86 = load i64* %tmp85, align 8
  tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 17, i32 5, i64 %arg, i64 %tmp10, i64 %tmp86)
  tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 18, i32 30, i8* null, i32 3, i64 %arg, i64 %tmp10, i64 %tmp86)
  ret i64 10
}
; Test small patchpoints that don't emit calls.
; With a null target and only 5 patch bytes, the patchpoint is emitted
; purely as nop padding (nopl) instead of a movabsq/callq sequence.
define void @small_patchpoint_codegen(i64 %p1, i64 %p2, i64 %p3, i64 %p4) {
entry:
; CHECK-LABEL: small_patchpoint_codegen:
; CHECK: Ltmp
; CHECK: nopl 8(%rax,%rax)
; CHECK-NEXT: popq
; CHECK-NEXT: ret
  %result = tail call i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 5, i32 5, i8* null, i32 2, i64 %p1, i64 %p2)
  ret void
}
; Declarations of the experimental stackmap/patchpoint intrinsics used above.
declare void @llvm.experimental.stackmap(i64, i32, ...)
declare void @llvm.experimental.patchpoint.void(i64, i32, i8*, i32, ...)
declare i64 @llvm.experimental.patchpoint.i64(i64, i32, i8*, i32, ...)