/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCClipProcessor.h"

#include "include/gpu/GrTexture.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/ccpr/GrCCClipPath.h"
#include "src/gpu/glsl/GrGLSLFragmentProcessor.h"
#include "src/gpu/glsl/GrGLSLFragmentShaderBuilder.h"

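// Wraps the clip path's atlas proxy in a GrSurfaceProxyView: coverage-count atlases are read as
// alpha-F16 (counts can be negative or exceed 1), literal-coverage atlases as alpha-8, with the
// read swizzle chosen to match that color type.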
static GrSurfaceProxyView make_view(const GrCaps& caps, GrSurfaceProxy* proxy,
                                    bool isCoverageCount) {
    GrColorType ct = isCoverageCount ? GrColorType::kAlpha_F16 : GrColorType::kAlpha_8;
    GrSwizzle swizzle = caps.getReadSwizzle(proxy->backendFormat(), ct);
    return { sk_ref_sp(proxy), GrCCAtlas::kTextureOrigin, swizzle };
}

GrCCClipProcessor::GrCCClipProcessor(GrSurfaceProxyView view, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : INHERITED(kGrCCClipProcessor_ClassID, kCompatibleWithCoverageAsAlpha_OptimizationFlag)
        , fClipPath(clipPath)
        , fIsCoverageCount(IsCoverageCount::kYes == isCoverageCount)
        , fMustCheckBounds(MustCheckBounds::kYes == mustCheckBounds)
        , fAtlasAccess(std::move(view)) {
    SkASSERT(fAtlasAccess.view());
    this->setTextureSamplerCnt(1);
}

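// Convenience overload: builds the atlas view from the clip path's lazy atlas proxy, then
// delegates to the primary constructor above.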
GrCCClipProcessor::GrCCClipProcessor(const GrCaps& caps, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : GrCCClipProcessor(make_view(caps, clipPath->atlasLazyProxy(),
                                      IsCoverageCount::kYes == isCoverageCount),
                            clipPath, isCoverageCount, mustCheckBounds) {
}

std::unique_ptr<GrFragmentProcessor> GrCCClipProcessor::clone() const {
    return std::make_unique<GrCCClipProcessor>(
            fAtlasAccess.view(), fClipPath, IsCoverageCount(fIsCoverageCount),
            MustCheckBounds(fMustCheckBounds));
}

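// The processor key captures everything that changes the generated shader: the fill rule (only
// relevant when interpreting coverage counts), whether the path is inverse-filled, and whether a
// bounds check is emitted.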
void GrCCClipProcessor::onGetGLSLProcessorKey(const GrShaderCaps&, GrProcessorKeyBuilder* b) const {
    const SkPath& clipPath = fClipPath->deviceSpacePath();
    uint32_t key = (fIsCoverageCount) ? (uint32_t)GrFillRuleForSkPath(clipPath) : 0;
    key = (key << 1) | ((clipPath.isInverseFillType()) ? 1 : 0);
    key = (key << 1) | ((fMustCheckBounds) ? 1 : 0);
    b->add32(key);
}

bool GrCCClipProcessor::onIsEqual(const GrFragmentProcessor& fp) const {
    const GrCCClipProcessor& that = fp.cast<GrCCClipProcessor>();
    // Each ClipPath path has a unique atlas proxy, so hasSameSamplersAndAccesses should have
    // already weeded out FPs with different ClipPaths.
    SkASSERT(that.fClipPath->deviceSpacePath().getGenerationID() ==
             fClipPath->deviceSpacePath().getGenerationID());
    return that.fClipPath->deviceSpacePath().getFillType() ==
                   fClipPath->deviceSpacePath().getFillType() &&
           that.fIsCoverageCount == fIsCoverageCount && that.fMustCheckBounds == fMustCheckBounds;
}

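// Emits the clip's fragment shader: optionally rejects fragments outside the path's device-space
// bounds, samples the atlas, converts a coverage count to [0,1] coverage per the fill rule
// (even-odd folds the count, nonzero saturates it), flips for inverse fills, and finally
// modulates the input color by the coverage.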
class GrCCClipProcessor::Impl : public GrGLSLFragmentProcessor {
public:
    void emitCode(EmitArgs& args) override {
        const GrCCClipProcessor& proc = args.fFp.cast<GrCCClipProcessor>();
        GrGLSLUniformHandler* uniHandler = args.fUniformHandler;
        GrGLSLFPFragmentBuilder* f = args.fFragBuilder;

        f->codeAppend ("half coverage;");

        if (proc.fMustCheckBounds) {
            const char* pathIBounds;
            fPathIBoundsUniform = uniHandler->addUniform(kFragment_GrShaderFlag, kFloat4_GrSLType,
                                                         "path_ibounds", &pathIBounds);
            f->codeAppendf("if (all(greaterThan(float4(sk_FragCoord.xy, %s.zw), "
                                               "float4(%s.xy, sk_FragCoord.xy)))) {",
                           pathIBounds, pathIBounds);
        }

        const char* atlasTransform;
        fAtlasTransformUniform = uniHandler->addUniform(kFragment_GrShaderFlag, kFloat4_GrSLType,
                                                        "atlas_transform", &atlasTransform);
        f->codeAppendf("float2 texcoord = sk_FragCoord.xy * %s.xy + %s.zw;",
                       atlasTransform, atlasTransform);

        f->codeAppend ("coverage = ");
        f->appendTextureLookup(args.fTexSamplers[0], "texcoord");
        f->codeAppend (".a;");

        if (proc.fIsCoverageCount) {
            auto fillRule = GrFillRuleForSkPath(proc.fClipPath->deviceSpacePath());
            if (GrFillRule::kEvenOdd == fillRule) {
                f->codeAppend ("half t = mod(abs(coverage), 2);");
                f->codeAppend ("coverage = 1 - abs(t - 1);");
            } else {
                SkASSERT(GrFillRule::kNonzero == fillRule);
                f->codeAppend ("coverage = min(abs(coverage), 1);");
            }
        }

        if (proc.fMustCheckBounds) {
            f->codeAppend ("} else {");
            f->codeAppend (    "coverage = 0;");
            f->codeAppend ("}");
        }

        if (proc.fClipPath->deviceSpacePath().isInverseFillType()) {
            f->codeAppend ("coverage = 1 - coverage;");
        }

        f->codeAppendf("%s = %s * coverage;", args.fOutputColor, args.fInputColor);
    }

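    // Uploads the uniforms the shader above expects: the path's device-space integer bounds
    // (only when bounds checking is enabled) and the scale/translate mapping device space into
    // atlas texture coordinates.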
    void onSetData(const GrGLSLProgramDataManager& pdman,
                   const GrFragmentProcessor& fp) override {
        const GrCCClipProcessor& proc = fp.cast<GrCCClipProcessor>();
        if (proc.fMustCheckBounds) {
            const SkRect pathIBounds = SkRect::Make(proc.fClipPath->pathDevIBounds());
            pdman.set4f(fPathIBoundsUniform, pathIBounds.left(), pathIBounds.top(),
                        pathIBounds.right(), pathIBounds.bottom());
        }
        const SkVector& scale = proc.fClipPath->atlasScale();
        const SkVector& trans = proc.fClipPath->atlasTranslate();
        pdman.set4f(fAtlasTransformUniform, scale.x(), scale.y(), trans.x(), trans.y());
    }

private:
    UniformHandle fPathIBoundsUniform;
    UniformHandle fAtlasTransformUniform;
};

GrGLSLFragmentProcessor* GrCCClipProcessor::onCreateGLSLInstance() const {
    return new Impl();
}