/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCClipProcessor.h"

#include "src/gpu/GrTexture.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/ccpr/GrCCClipPath.h"
#include "src/gpu/glsl/GrGLSLFragmentProcessor.h"
#include "src/gpu/glsl/GrGLSLFragmentShaderBuilder.h"

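// Wraps the clip path's atlas proxy in a view with the appropriate read swizzle. Coverage-count
// atlases are read as alpha F16 (the stored values can fall outside [0..1] and are resolved in
// the shader); already-resolved atlases are read as plain alpha 8.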
static GrSurfaceProxyView make_view(const GrCaps& caps, GrSurfaceProxy* proxy,
                                    bool isCoverageCount) {
    GrColorType ct = isCoverageCount ? GrColorType::kAlpha_F16 : GrColorType::kAlpha_8;
    GrSwizzle swizzle = caps.getReadSwizzle(proxy->backendFormat(), ct);
    return { sk_ref_sp(proxy), GrCCAtlas::kTextureOrigin, swizzle };
}

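// Primary constructor: takes a pre-built view of the clip path's atlas texture.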
GrCCClipProcessor::GrCCClipProcessor(GrSurfaceProxyView view, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : INHERITED(kGrCCClipProcessor_ClassID, kCompatibleWithCoverageAsAlpha_OptimizationFlag)
        , fClipPath(clipPath)
        , fIsCoverageCount(IsCoverageCount::kYes == isCoverageCount)
        , fMustCheckBounds(MustCheckBounds::kYes == mustCheckBounds)
        , fAtlasAccess(std::move(view)) {
    SkASSERT(fAtlasAccess.view());
    this->setTextureSamplerCnt(1);
}

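// Convenience constructor: builds the atlas view from the clip path's lazy proxy.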
GrCCClipProcessor::GrCCClipProcessor(const GrCaps& caps, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : GrCCClipProcessor(make_view(caps, clipPath->atlasLazyProxy(),
                                      IsCoverageCount::kYes == isCoverageCount),
                            clipPath, isCoverageCount, mustCheckBounds) {
}

std::unique_ptr<GrFragmentProcessor> GrCCClipProcessor::clone() const {
    return std::make_unique<GrCCClipProcessor>(
            fAtlasAccess.view(), fClipPath, IsCoverageCount(fIsCoverageCount),
            MustCheckBounds(fMustCheckBounds));
}

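// The processor key captures everything that changes the generated shader: the fill rule (only
// relevant when resolving raw coverage counts), whether the path is inverse filled, and whether
// an explicit bounds check is emitted.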
void GrCCClipProcessor::onGetGLSLProcessorKey(const GrShaderCaps&, GrProcessorKeyBuilder* b) const {
    const SkPath& clipPath = fClipPath->deviceSpacePath();
    uint32_t key = (fIsCoverageCount) ? (uint32_t)GrFillRuleForSkPath(clipPath) : 0;
    key = (key << 1) | ((clipPath.isInverseFillType()) ? 1 : 0);
    key = (key << 1) | ((fMustCheckBounds) ? 1 : 0);
    b->add32(key);
}

bool GrCCClipProcessor::onIsEqual(const GrFragmentProcessor& fp) const {
    const GrCCClipProcessor& that = fp.cast<GrCCClipProcessor>();
    // Each ClipPath path has a unique atlas proxy, so hasSameSamplersAndAccesses should have
    // already weeded out FPs with different ClipPaths.
    SkASSERT(that.fClipPath->deviceSpacePath().getGenerationID() ==
             fClipPath->deviceSpacePath().getGenerationID());
    return that.fClipPath->deviceSpacePath().getFillType() ==
                   fClipPath->deviceSpacePath().getFillType() &&
           that.fIsCoverageCount == fIsCoverageCount && that.fMustCheckBounds == fMustCheckBounds;
}

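// GLSL implementation: samples the clip path's coverage from the atlas, optionally resolves raw
// coverage counts according to the fill rule, and multiplies the result into the input color.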
class GrCCClipProcessor::Impl : public GrGLSLFragmentProcessor {
public:
    void emitCode(EmitArgs& args) override {
        const GrCCClipProcessor& proc = args.fFp.cast<GrCCClipProcessor>();
        GrGLSLUniformHandler* uniHandler = args.fUniformHandler;
        GrGLSLFPFragmentBuilder* f = args.fFragBuilder;

        f->codeAppend ("half coverage;");

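        // When the draw may extend past the clip path's device-space bounds, wrap the atlas
        // lookup in a bounds test so out-of-bounds fragments fall through to zero coverage.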
        if (proc.fMustCheckBounds) {
            const char* pathIBounds;
            fPathIBoundsUniform = uniHandler->addUniform(&proc, kFragment_GrShaderFlag,
                                                         kFloat4_GrSLType, "path_ibounds",
                                                         &pathIBounds);
            f->codeAppendf("if (all(greaterThan(float4(sk_FragCoord.xy, %s.zw), "
                                               "float4(%s.xy, sk_FragCoord.xy)))) {",
                           pathIBounds, pathIBounds);
        }

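        // Map the fragment's device-space position into atlas texture coordinates and read the
        // path's coverage from the atlas alpha channel.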
        const char* atlasTransform;
        fAtlasTransformUniform = uniHandler->addUniform(&proc, kFragment_GrShaderFlag,
                                                        kFloat4_GrSLType, "atlas_transform",
                                                        &atlasTransform);
        f->codeAppendf("float2 texcoord = sk_FragCoord.xy * %s.xy + %s.zw;",
                       atlasTransform, atlasTransform);

        f->codeAppend ("coverage = ");
        f->appendTextureLookup(args.fTexSamplers[0], "texcoord");
        f->codeAppend (".a;");

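        // Coverage-count atlases hold unresolved counts: fold them into [0..1] coverage with a
        // triangle wave for even-odd fills, or clamp the absolute value for nonzero fills.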
        if (proc.fIsCoverageCount) {
            auto fillRule = GrFillRuleForSkPath(proc.fClipPath->deviceSpacePath());
            if (GrFillRule::kEvenOdd == fillRule) {
                f->codeAppend ("half t = mod(abs(coverage), 2);");
                f->codeAppend ("coverage = 1 - abs(t - 1);");
            } else {
                SkASSERT(GrFillRule::kNonzero == fillRule);
                f->codeAppend ("coverage = min(abs(coverage), 1);");
            }
        }

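        // Close the bounds test: fragments outside the path's bounds receive zero coverage.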
        if (proc.fMustCheckBounds) {
            f->codeAppend ("} else {");
            f->codeAppend (    "coverage = 0;");
            f->codeAppend ("}");
        }

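        // Inverse-filled clip paths cover everything the path itself does not.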
        if (proc.fClipPath->deviceSpacePath().isInverseFillType()) {
            f->codeAppend ("coverage = 1 - coverage;");
        }

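        // Apply the clip by modulating the input color with the final coverage.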
        f->codeAppendf("%s = %s * coverage;", args.fOutputColor, args.fInputColor);
    }

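    // Uploads the uniforms: the path's device-space integer bounds (only when the shader emits a
    // bounds test) and the scale/translate that maps device space into atlas coordinates.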
    void onSetData(const GrGLSLProgramDataManager& pdman,
                   const GrFragmentProcessor& fp) override {
        const GrCCClipProcessor& proc = fp.cast<GrCCClipProcessor>();
        if (proc.fMustCheckBounds) {
            const SkRect pathIBounds = SkRect::Make(proc.fClipPath->pathDevIBounds());
            pdman.set4f(fPathIBoundsUniform, pathIBounds.left(), pathIBounds.top(),
                        pathIBounds.right(), pathIBounds.bottom());
        }
        const SkVector& scale = proc.fClipPath->atlasScale();
        const SkVector& trans = proc.fClipPath->atlasTranslate();
        pdman.set4f(fAtlasTransformUniform, scale.x(), scale.y(), trans.x(), trans.y());
    }

private:
    UniformHandle fPathIBoundsUniform;
    UniformHandle fAtlasTransformUniform;
};

GrGLSLFragmentProcessor* GrCCClipProcessor::onCreateGLSLInstance() const {
    return new Impl();
}