; (web-viewer blob id, kept for provenance: 8ca0b12b547f8041a5c06ca2662a34bec28f8ca4)
; RUN: llc < %s -relocation-model=static -realign-stack=1 -mcpu=yonah | FileCheck %s

; The double argument is at 4(esp) which is 16-byte aligned, allowing us to
; fold the load into the andpd.

; 32-bit x86 Darwin module: pointers are 32-bit, doubles have 32-bit ABI
; alignment but 64-bit preferred alignment (f64:32:64), which is why stack
; realignment matters in the tests below.
target datalayout = "e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-f32:32:32-f64:32:64-v64:64:64-v128:128:128-a0:0:64-f80:128:128"
target triple = "i686-apple-darwin8"
@G = external global double

; Loads @G (align 16) and the byval aggregate's first double, takes fabs of
; each, and stores both results through %P.  The CHECK verifies that the
; 16-byte-aligned stack argument load is folded directly into andpd (the
; fabs lowering) instead of going through a separate movsd.
define void @test({ double, double }* byval %z, double* %P) {
entry:
	%tmp3 = load double* @G, align 16		; <double> [#uses=1]
	%tmp4 = tail call double @fabs( double %tmp3 )		; <double> [#uses=1]
	volatile store double %tmp4, double* %P
	%tmp = getelementptr { double, double }* %z, i32 0, i32 0		; <double*> [#uses=1]
	%tmp1 = volatile load double* %tmp, align 8		; <double> [#uses=1]
	%tmp2 = tail call double @fabs( double %tmp1 )		; <double> [#uses=1]
	; CHECK: andpd{{.*}}4(%esp), %xmm
	%tmp6 = fadd double %tmp4, %tmp2		; <double> [#uses=1]
	volatile store double %tmp6, double* %P, align 8
	ret void
}

; alignstack(16) must force a dynamic stack realignment even in an empty
; function: the prologue should mask esp down to a 16-byte boundary.
define void @test2() alignstack(16) {
entry:
	; CHECK: andl{{.*}}$-16, %esp
	ret void
}

; Use a call to force a spill.
; alignstack(32) with a call in the body: the <2 x double> value live across
; the call must be spilled to a stack slot, so the prologue has to realign
; esp to 32 bytes for the spill/reload to be safely aligned.
define <2 x double> @test3(<2 x double> %x, <2 x double> %y) alignstack(32) {
entry:
	; CHECK: andl{{.*}}$-32, %esp
	call void @test2()
	%A = fmul <2 x double> %x, %y
	ret <2 x double> %A
}

; External libm fabs; on this target it lowers to an andpd with a sign-bit
; mask, which is what the CHECK lines above rely on.
declare double @fabs(double)