blob: 1cecd4f1fcec19ab3fffc74425661c656360eaa4 [file] [log] [blame]
mtklein5da116f2014-11-11 06:16:42 -08001#include "Test.h"
2#include "Sk4x.h"
3
// Convenience wrappers: compare two Sk4x values lane-for-lane and report the
// result through the reporter 'r' captured from the enclosing DEF_TEST.
// Arguments are parenthesized so that expressions with lower-precedence
// operators (e.g. ASSERT_EQ(x, flag ? y : z)) expand safely.
#define ASSERT_EQ(a, b) REPORTER_ASSERT(r, (a).equal(b).allTrue())
#define ASSERT_NE(a, b) REPORTER_ASSERT(r, (a).notEqual(b).allTrue())
6
7DEF_TEST(Sk4x_Construction, r) {
8 Sk4f uninitialized;
9 Sk4f zero(0,0,0,0);
10 Sk4f foo(1,2,3,4),
11 bar(foo),
12 baz = bar;
13 ASSERT_EQ(foo, bar);
14 ASSERT_EQ(bar, baz);
15 ASSERT_EQ(baz, foo);
16}
17
// Float storage with a known 16-byte-aligned start.
// On 64-bit machines the stack starts 128-bit aligned, but not necessarily
// so on 32-bit; putting an Sk4f first forces 16-byte alignment, so fs[0]
// is 16-byte aligned and fs+1 is not.
struct AlignedFloats {
    Sk4f forces16ByteAlignment;
    float fs[5];
};
22
mtklein5da116f2014-11-11 06:16:42 -080023DEF_TEST(Sk4x_LoadStore, r) {
mtkleinb1166192014-11-25 11:00:38 -080024 AlignedFloats aligned;
mtklein5da116f2014-11-11 06:16:42 -080025 // fs will be 16-byte aligned, fs+1 not.
mtkleinb1166192014-11-25 11:00:38 -080026 float* fs = aligned.fs;
27 for (int i = 0; i < 5; i++) { // set to 5,6,7,8,9
28 fs[i] = float(i+5);
29 }
mtklein5da116f2014-11-11 06:16:42 -080030
31 Sk4f foo = Sk4f::Load(fs);
32 Sk4f bar = Sk4f::LoadAligned(fs);
33 ASSERT_EQ(foo, bar);
34
35 foo = Sk4f::Load(fs+1);
36 ASSERT_NE(foo, bar);
37
38 foo.storeAligned(fs);
39 bar.store(fs+1);
40 REPORTER_ASSERT(r, fs[0] == 6 &&
41 fs[1] == 5 &&
42 fs[2] == 6 &&
43 fs[3] == 7 &&
44 fs[4] == 8);
45}
46
47DEF_TEST(Sk4x_Conversions, r) {
48 // Assuming IEEE floats.
49 Sk4f zerof(0,0,0,0);
50 Sk4i zeroi(0,0,0,0);
51 ASSERT_EQ(zeroi, zerof.cast<Sk4i>());
52 ASSERT_EQ(zeroi, zerof.reinterpret<Sk4i>());
mtkleinef099912014-11-12 07:27:01 -080053 ASSERT_EQ(zerof, zeroi.cast<Sk4f>());
54 ASSERT_EQ(zerof, zeroi.reinterpret<Sk4f>());
mtklein5da116f2014-11-11 06:16:42 -080055
56 Sk4f twof(2,2,2,2);
57 Sk4i twoi(2,2,2,2);
58 ASSERT_EQ(twoi, twof.cast<Sk4i>());
59 ASSERT_NE(twoi, twof.reinterpret<Sk4i>());
mtkleinef099912014-11-12 07:27:01 -080060 ASSERT_EQ(twof, twoi.cast<Sk4f>());
61 ASSERT_NE(twof, twoi.reinterpret<Sk4f>());
mtklein5da116f2014-11-11 06:16:42 -080062}
63
64DEF_TEST(Sk4x_Bits, r) {
65 ASSERT_EQ(Sk4i(0,0,0,0).bitNot(), Sk4i(-1,-1,-1,-1));
66
67 Sk4i a(2,3,4,5),
68 b(1,3,5,7);
69 ASSERT_EQ(Sk4i(0,3,4,5), a.bitAnd(b));
70 ASSERT_EQ(Sk4i(3,3,5,7), a.bitOr(b));
71}
72
73DEF_TEST(Sk4x_Arith, r) {
74 ASSERT_EQ(Sk4f(4,6,8,10), Sk4f(1,2,3,4).add(Sk4f(3,4,5,6)));
75 ASSERT_EQ(Sk4f(-2,-2,-2,-2), Sk4f(1,2,3,4).subtract(Sk4f(3,4,5,6)));
76 ASSERT_EQ(Sk4f(3,8,15,24), Sk4f(1,2,3,4).multiply(Sk4f(3,4,5,6)));
77
78 float third = 1.0f/3.0f;
79 ASSERT_EQ(Sk4f(1*third, 0.5f, 0.6f, 2*third), Sk4f(1,2,3,4).divide(Sk4f(3,4,5,6)));
mtkleinef099912014-11-12 07:27:01 -080080 ASSERT_EQ(Sk4i(4,6,8,10), Sk4i(1,2,3,4).add(Sk4i(3,4,5,6)));
81 ASSERT_EQ(Sk4i(-2,-2,-2,-2), Sk4i(1,2,3,4).subtract(Sk4i(3,4,5,6)));
82 ASSERT_EQ(Sk4i(3,8,15,24), Sk4i(1,2,3,4).multiply(Sk4i(3,4,5,6)));
mtklein5da116f2014-11-11 06:16:42 -080083}
84
mtklein27195522015-02-26 12:21:25 -080085DEF_TEST(Sk4x_ImplicitPromotion, r) {
mtkleina27cdef2015-03-06 16:20:22 -080086 ASSERT_EQ(Sk4f(2,4,6,8), Sk4f(1,2,3,4).multiply(Sk4f(2.0f)));
mtklein27195522015-02-26 12:21:25 -080087}
88
mtklein24aa0f02015-02-26 12:48:05 -080089DEF_TEST(Sk4x_Sqrt, r) {
90 Sk4f squares(4, 16, 25, 121),
91 roots(2, 4, 5, 11);
92 // .sqrt() should be pretty precise.
msarettc877a712015-03-04 15:55:54 -080093 Sk4f error = roots.subtract(squares.sqrt());
mtkleina27cdef2015-03-06 16:20:22 -080094 REPORTER_ASSERT(r, error.greaterThanEqual(Sk4f(0.0f)).allTrue());
95 REPORTER_ASSERT(r, error.lessThan(Sk4f(0.000001f)).allTrue());
mtklein24aa0f02015-02-26 12:48:05 -080096
msarettc877a712015-03-04 15:55:54 -080097 // .rsqrt() isn't so precise (for SSE), but should be pretty close.
98 error = roots.subtract(squares.multiply(squares.rsqrt()));
mtkleina27cdef2015-03-06 16:20:22 -080099 REPORTER_ASSERT(r, error.greaterThanEqual(Sk4f(0.0f)).allTrue());
100 REPORTER_ASSERT(r, error.lessThan(Sk4f(0.01f)).allTrue());
mtklein24aa0f02015-02-26 12:48:05 -0800101}
102
mtklein5da116f2014-11-11 06:16:42 -0800103DEF_TEST(Sk4x_Comparison, r) {
104 ASSERT_EQ(Sk4f(1,2,3,4), Sk4f(1,2,3,4));
105 ASSERT_NE(Sk4f(4,3,2,1), Sk4f(1,2,3,4));
106
107 ASSERT_EQ(Sk4i(-1,-1,0,-1), Sk4f(1,2,5,4).equal(Sk4f(1,2,3,4)));
108
109 ASSERT_EQ(Sk4i(-1,-1,-1,-1), Sk4f(1,2,3,4).lessThan(Sk4f(2,3,4,5)));
110 ASSERT_EQ(Sk4i(-1,-1,-1,-1), Sk4f(1,2,3,4).lessThanEqual(Sk4f(2,3,4,5)));
111 ASSERT_EQ(Sk4i(0,0,0,0), Sk4f(1,2,3,4).greaterThan(Sk4f(2,3,4,5)));
112 ASSERT_EQ(Sk4i(0,0,0,0), Sk4f(1,2,3,4).greaterThanEqual(Sk4f(2,3,4,5)));
mtkleinef099912014-11-12 07:27:01 -0800113
114 ASSERT_EQ(Sk4i(1,2,3,4), Sk4i(1,2,3,4));
115 ASSERT_NE(Sk4i(4,3,2,1), Sk4i(1,2,3,4));
116
117 ASSERT_EQ(Sk4i(-1,-1,0,-1), Sk4i(1,2,5,4).equal(Sk4i(1,2,3,4)));
118
119 ASSERT_EQ(Sk4i(-1,-1,-1,-1), Sk4i(1,2,3,4).lessThan(Sk4i(2,3,4,5)));
120 ASSERT_EQ(Sk4i(-1,-1,-1,-1), Sk4i(1,2,3,4).lessThanEqual(Sk4i(2,3,4,5)));
121 ASSERT_EQ(Sk4i(0,0,0,0), Sk4i(1,2,3,4).greaterThan(Sk4i(2,3,4,5)));
122 ASSERT_EQ(Sk4i(0,0,0,0), Sk4i(1,2,3,4).greaterThanEqual(Sk4i(2,3,4,5)));
mtklein5da116f2014-11-11 06:16:42 -0800123}
124
125DEF_TEST(Sk4x_MinMax, r) {
126 ASSERT_EQ(Sk4f(1,2,2,1), Sk4f::Min(Sk4f(1,2,3,4), Sk4f(4,3,2,1)));
127 ASSERT_EQ(Sk4f(4,3,3,4), Sk4f::Max(Sk4f(1,2,3,4), Sk4f(4,3,2,1)));
mtkleinef099912014-11-12 07:27:01 -0800128 ASSERT_EQ(Sk4i(1,2,2,1), Sk4i::Min(Sk4i(1,2,3,4), Sk4i(4,3,2,1)));
129 ASSERT_EQ(Sk4i(4,3,3,4), Sk4i::Max(Sk4i(1,2,3,4), Sk4i(4,3,2,1)));
mtklein5da116f2014-11-11 06:16:42 -0800130}
131
132DEF_TEST(Sk4x_Swizzle, r) {
133 ASSERT_EQ(Sk4f(3,4,1,2), Sk4f(1,2,3,4).zwxy());
134 ASSERT_EQ(Sk4f(1,2,5,6), Sk4f::XYAB(Sk4f(1,2,3,4), Sk4f(5,6,7,8)));
135 ASSERT_EQ(Sk4f(3,4,7,8), Sk4f::ZWCD(Sk4f(1,2,3,4), Sk4f(5,6,7,8)));
mtkleinef099912014-11-12 07:27:01 -0800136 ASSERT_EQ(Sk4i(3,4,1,2), Sk4i(1,2,3,4).zwxy());
137 ASSERT_EQ(Sk4i(1,2,5,6), Sk4i::XYAB(Sk4i(1,2,3,4), Sk4i(5,6,7,8)));
138 ASSERT_EQ(Sk4i(3,4,7,8), Sk4i::ZWCD(Sk4i(1,2,3,4), Sk4i(5,6,7,8)));
mtklein5da116f2014-11-11 06:16:42 -0800139}