blob: cd3c31d9a780c2fe7a14e3039942008e5e87950d [file] [log] [blame]
mike@reedtribe.orge51755f2011-12-10 19:36:56 +00001#include "SkArithmeticMode.h"
2#include "SkColorPriv.h"
3#include "SkUnPreMultiply.h"
4
5class SkArithmeticMode_scalar : public SkXfermode {
6public:
7 SkArithmeticMode_scalar(SkScalar k1, SkScalar k2, SkScalar k3, SkScalar k4) {
8 fK[0] = k1;
9 fK[1] = k2;
10 fK[2] = k3;
11 fK[3] = k4;
12 }
13
14 virtual void xfer32(SkPMColor dst[], const SkPMColor src[], int count,
15 const SkAlpha aa[]) SK_OVERRIDE;
mike@reedtribe.orge51755f2011-12-10 19:36:56 +000016
djsollen@google.comba28d032012-03-26 17:57:35 +000017 SK_DECLARE_UNFLATTENABLE_OBJECT()
mike@reedtribe.orge51755f2011-12-10 19:36:56 +000018
19private:
20 SkScalar fK[4];
21};
22
// Clamp an arbitrary int to the renderable byte range [0, 255].
static int pinToByte(int value) {
    return value < 0 ? 0 : (value > 255 ? 255 : value);
}
31
32static int arith(SkScalar k1, SkScalar k2, SkScalar k3, SkScalar k4,
33 int src, int dst) {
34 SkScalar result = SkScalarMul(k1, src * dst) +
35 SkScalarMul(k2, src) +
36 SkScalarMul(k3, dst) +
37 k4;
38 int res = SkScalarRoundToInt(result);
39 return pinToByte(res);
40}
41
// Linearly interpolate from dst toward src; scale is in [0, 256],
// where 0 yields dst and 256 yields src.
static int blend(int src, int dst, int scale) {
    int delta = (src - dst) * scale;
    return dst + (delta >> 8);
}
45
// A color only needs an unpremultiply step when its alpha is partial:
// alpha 0 and alpha 0xFF both leave components usable as-is.
static bool needsUnpremul(int alpha) {
    return !(0 == alpha || 0xFF == alpha);
}
49
50void SkArithmeticMode_scalar::xfer32(SkPMColor dst[], const SkPMColor src[],
51 int count, const SkAlpha aaCoverage[]) {
52 SkScalar k1 = fK[0] / 255;
53 SkScalar k2 = fK[1];
54 SkScalar k3 = fK[2];
55 SkScalar k4 = fK[3] * 255;
56
57 for (int i = 0; i < count; ++i) {
58 if ((NULL == aaCoverage) || aaCoverage[i]) {
59 SkPMColor sc = src[i];
60 SkPMColor dc = dst[i];
61 int sa = SkGetPackedA32(sc);
62 int da = SkGetPackedA32(dc);
63
64 int srcNeedsUnpremul = needsUnpremul(sa);
65 int dstNeedsUnpremul = needsUnpremul(sa);
66
67 int a, r, g, b;
68
tomhudson@google.com5efe0cb2012-04-10 19:14:48 +000069 if (!srcNeedsUnpremul && !dstNeedsUnpremul) {
mike@reedtribe.orge51755f2011-12-10 19:36:56 +000070 a = arith(k1, k2, k3, k4, sa, sa);
71 r = arith(k1, k2, k3, k4, SkGetPackedR32(sc), SkGetPackedR32(dc));
72 g = arith(k1, k2, k3, k4, SkGetPackedG32(sc), SkGetPackedG32(dc));
73 b = arith(k1, k2, k3, k4, SkGetPackedB32(sc), SkGetPackedB32(dc));
74 } else {
75 int sr = SkGetPackedR32(sc);
76 int sg = SkGetPackedG32(sc);
77 int sb = SkGetPackedB32(sc);
78 if (srcNeedsUnpremul) {
79 SkUnPreMultiply::Scale scale = SkUnPreMultiply::GetScale(sa);
80 sr = SkUnPreMultiply::ApplyScale(scale, sr);
81 sg = SkUnPreMultiply::ApplyScale(scale, sg);
82 sb = SkUnPreMultiply::ApplyScale(scale, sb);
83 }
84
85 int dr = SkGetPackedR32(dc);
86 int dg = SkGetPackedG32(dc);
87 int db = SkGetPackedB32(dc);
88 if (dstNeedsUnpremul) {
89 SkUnPreMultiply::Scale scale = SkUnPreMultiply::GetScale(da);
90 dr = SkUnPreMultiply::ApplyScale(scale, dr);
91 dg = SkUnPreMultiply::ApplyScale(scale, dg);
92 db = SkUnPreMultiply::ApplyScale(scale, db);
93 }
94
95 a = arith(k1, k2, k3, k4, sa, sa);
96 r = arith(k1, k2, k3, k4, sr, dr);
97 g = arith(k1, k2, k3, k4, sg, dg);
98 b = arith(k1, k2, k3, k4, sb, db);
99 }
100
101 // apply antialias coverage if necessary
102 if (aaCoverage && 0xFF != aaCoverage[i]) {
103 int scale = aaCoverage[i] + (aaCoverage[i] >> 7);
104 a = blend(a, SkGetPackedA32(sc), scale);
105 r = blend(r, SkGetPackedR32(sc), scale);
106 g = blend(g, SkGetPackedG32(sc), scale);
107 b = blend(b, SkGetPackedB32(sc), scale);
108 }
109
mike@reedtribe.orgd51ea262012-08-12 18:50:34 +0000110 dst[i] = SkPremultiplyARGBInline(a, r, g, b);
mike@reedtribe.orge51755f2011-12-10 19:36:56 +0000111 }
112 }
113}
114
115
116///////////////////////////////////////////////////////////////////////////////
117
118static bool fitsInBits(SkScalar x, int bits) {
119#ifdef SK_SCALAR_IS_FIXED
120 x = SkAbs32(x);
121 x += 1 << 7;
122 x >>= 8;
123 return x < (1 << (bits - 1));
124#else
125 return SkScalarAbs(x) < (1 << (bits - 1));
126#endif
127}
128
#if 0 // UNUSED
// Convert a scalar to 8.8 ("dot 8") fixed point.
static int32_t toDot8(SkScalar x) {
#ifdef SK_SCALAR_IS_FIXED
    // 16.16 fixed -> 8.8, rounding to nearest.
    x += 1 << 7;
    x >>= 8;
    return x;
#else
    // NOTE(review): this float path truncates while the fixed path above
    // rounds — an inconsistency to resolve if this code is re-enabled.
    return (int32_t)(x * 256);
#endif
}
#endif
mike@reedtribe.orge51755f2011-12-10 19:36:56 +0000140
141SkXfermode* SkArithmeticMode::Create(SkScalar k1, SkScalar k2,
142 SkScalar k3, SkScalar k4) {
143 if (fitsInBits(k1, 8) && fitsInBits(k2, 16) &&
144 fitsInBits(k2, 16) && fitsInBits(k2, 24)) {
145
caryclark@google.com383d5d42012-06-06 12:09:18 +0000146#if 0 // UNUSED
mike@reedtribe.orge51755f2011-12-10 19:36:56 +0000147 int32_t i1 = toDot8(k1);
148 int32_t i2 = toDot8(k2);
149 int32_t i3 = toDot8(k3);
150 int32_t i4 = toDot8(k4);
mike@reedtribe.orge51755f2011-12-10 19:36:56 +0000151 if (i1) {
152 return SkNEW_ARGS(SkArithmeticMode_quad, (i1, i2, i3, i4));
153 }
154 if (0 == i2) {
155 return SkNEW_ARGS(SkArithmeticMode_dst, (i3, i4));
156 }
157 if (0 == i3) {
158 return SkNEW_ARGS(SkArithmeticMode_src, (i2, i4));
159 }
160 return SkNEW_ARGS(SkArithmeticMode_linear, (i2, i3, i4));
161#endif
162 }
163 return SkNEW_ARGS(SkArithmeticMode_scalar, (k1, k2, k3, k4));
164}
165