/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 8 | #include "src/gpu/ccpr/GrCCClipProcessor.h" |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 9 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 10 | #include "include/gpu/GrTexture.h" |
Greg Daniel | f91aeb2 | 2019-06-18 09:58:02 -0400 | [diff] [blame] | 11 | #include "src/gpu/GrTextureProxy.h" |
Ben Wagner | 729a23f | 2019-05-17 16:29:34 -0400 | [diff] [blame] | 12 | #include "src/gpu/ccpr/GrCCClipPath.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 13 | #include "src/gpu/glsl/GrGLSLFragmentProcessor.h" |
| 14 | #include "src/gpu/glsl/GrGLSLFragmentShaderBuilder.h" |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 15 | |
Greg Daniel | e810d83 | 2020-02-07 17:20:56 -0500 | [diff] [blame^] | 16 | static GrSurfaceProxyView make_view(const GrCaps& caps, GrSurfaceProxy* proxy, |
| 17 | bool isCoverageCount) { |
| 18 | GrColorType ct = isCoverageCount ? GrColorType::kAlpha_F16 : GrColorType::kAlpha_8; |
| 19 | GrSwizzle swizzle = caps.getReadSwizzle(proxy->backendFormat(), ct); |
| 20 | return { sk_ref_sp(proxy), GrCCAtlas::kTextureOrigin, swizzle }; |
| 21 | } |
| 22 | |
// Primary constructor: takes an already-built view of the clip path's atlas.
// The processor advertises kCompatibleWithCoverageAsAlpha since its output is
// inputColor scaled by a coverage value. `clipPath` is borrowed, not owned —
// it must outlive this processor (NOTE(review): lifetime is managed by the
// caller; confirm against GrCCPerFlushResources usage).
GrCCClipProcessor::GrCCClipProcessor(GrSurfaceProxyView view, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : INHERITED(kGrCCClipProcessor_ClassID, kCompatibleWithCoverageAsAlpha_OptimizationFlag)
        , fClipPath(clipPath)
        , fIsCoverageCount(IsCoverageCount::kYes == isCoverageCount)
        , fMustCheckBounds(MustCheckBounds::kYes == mustCheckBounds)
        , fAtlasAccess(std::move(view)) {
    SkASSERT(fAtlasAccess.view());
    // Exactly one sampler: the atlas texture read in Impl::emitCode.
    this->setTextureSamplerCnt(1);
}
| 34 | |
// Convenience overload: builds the atlas view from the clip path's lazy proxy
// (via make_view, which selects F16 vs. A8 based on the coverage-count mode)
// and delegates to the primary constructor.
GrCCClipProcessor::GrCCClipProcessor(const GrCaps& caps, const GrCCClipPath* clipPath,
                                     IsCoverageCount isCoverageCount,
                                     MustCheckBounds mustCheckBounds)
        : GrCCClipProcessor(make_view(caps, clipPath->atlasLazyProxy(),
                                      IsCoverageCount::kYes == isCoverageCount),
                            clipPath, isCoverageCount, mustCheckBounds) {
}
| 42 | |
Chris Dalton | 383a2ef | 2018-01-08 17:21:41 -0500 | [diff] [blame] | 43 | std::unique_ptr<GrFragmentProcessor> GrCCClipProcessor::clone() const { |
Mike Klein | f46d5ca | 2019-12-11 10:45:01 -0500 | [diff] [blame] | 44 | return std::make_unique<GrCCClipProcessor>( |
Greg Daniel | e810d83 | 2020-02-07 17:20:56 -0500 | [diff] [blame^] | 45 | fAtlasAccess.view(), fClipPath, IsCoverageCount(fIsCoverageCount), |
| 46 | MustCheckBounds(fMustCheckBounds)); |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 47 | } |
| 48 | |
Chris Dalton | 383a2ef | 2018-01-08 17:21:41 -0500 | [diff] [blame] | 49 | void GrCCClipProcessor::onGetGLSLProcessorKey(const GrShaderCaps&, GrProcessorKeyBuilder* b) const { |
Chris Dalton | c3318f0 | 2019-07-19 14:20:53 -0600 | [diff] [blame] | 50 | const SkPath& clipPath = fClipPath->deviceSpacePath(); |
| 51 | uint32_t key = (fIsCoverageCount) ? (uint32_t)GrFillRuleForSkPath(clipPath) : 0; |
| 52 | key = (key << 1) | ((clipPath.isInverseFillType()) ? 1 : 0); |
| 53 | key = (key << 1) | ((fMustCheckBounds) ? 1 : 0); |
| 54 | b->add32(key); |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 55 | } |
| 56 | |
Chris Dalton | 383a2ef | 2018-01-08 17:21:41 -0500 | [diff] [blame] | 57 | bool GrCCClipProcessor::onIsEqual(const GrFragmentProcessor& fp) const { |
| 58 | const GrCCClipProcessor& that = fp.cast<GrCCClipProcessor>(); |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 59 | // Each ClipPath path has a unique atlas proxy, so hasSameSamplersAndAccesses should have |
| 60 | // already weeded out FPs with different ClipPaths. |
| 61 | SkASSERT(that.fClipPath->deviceSpacePath().getGenerationID() == |
| 62 | fClipPath->deviceSpacePath().getGenerationID()); |
Chris Dalton | c3318f0 | 2019-07-19 14:20:53 -0600 | [diff] [blame] | 63 | return that.fClipPath->deviceSpacePath().getFillType() == |
| 64 | fClipPath->deviceSpacePath().getFillType() && |
| 65 | that.fIsCoverageCount == fIsCoverageCount && that.fMustCheckBounds == fMustCheckBounds; |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 66 | } |
| 67 | |
// GLSL implementation: samples the atlas at the fragment's device position and
// resolves the stored value into a [0,1] clip coverage, which scales the input
// color. Shader-string assembly below is order-sensitive; comments only.
class GrCCClipProcessor::Impl : public GrGLSLFragmentProcessor {
public:
    void emitCode(EmitArgs& args) override {
        const GrCCClipProcessor& proc = args.fFp.cast<GrCCClipProcessor>();
        GrGLSLUniformHandler* uniHandler = args.fUniformHandler;
        GrGLSLFPFragmentBuilder* f = args.fFragBuilder;

        f->codeAppend ("half coverage;");

        if (proc.fMustCheckBounds) {
            // Guard the atlas lookup with the path's device-space integer
            // bounds, packed as (left, top, right, bottom). The condition below
            // is: left < x && top < y && x < right && y < bottom.
            const char* pathIBounds;
            fPathIBoundsUniform = uniHandler->addUniform(kFragment_GrShaderFlag, kFloat4_GrSLType,
                                                         "path_ibounds", &pathIBounds);
            f->codeAppendf("if (all(greaterThan(float4(sk_FragCoord.xy, %s.zw), "
                                               "float4(%s.xy, sk_FragCoord.xy)))) {",
                           pathIBounds, pathIBounds);
        }

        // Map the fragment position into atlas texture coordinates:
        // texcoord = sk_FragCoord.xy * transform.xy + transform.zw
        // (scale and translate come from onSetData below).
        const char* atlasTransform;
        fAtlasTransformUniform = uniHandler->addUniform(kFragment_GrShaderFlag, kFloat4_GrSLType,
                                                        "atlas_transform", &atlasTransform);
        f->codeAppendf("float2 texcoord = sk_FragCoord.xy * %s.xy + %s.zw;",
                       atlasTransform, atlasTransform);

        // Read the atlas's alpha channel at texcoord.
        f->codeAppend ("coverage = ");
        f->appendTextureLookup(args.fTexSamplers[0], "texcoord");
        f->codeAppend (".a;");

        if (proc.fIsCoverageCount) {
            // Coverage-count mode: the sampled value is a signed count rather
            // than literal coverage, so apply the path's fill rule to it.
            auto fillRule = GrFillRuleForSkPath(proc.fClipPath->deviceSpacePath());
            if (GrFillRule::kEvenOdd == fillRule) {
                // Even-odd: fold |count| into a 0..1 triangle wave
                // (1 - |mod(|count|, 2) - 1|), so odd counts map to 1.
                f->codeAppend ("half t = mod(abs(coverage), 2);");
                f->codeAppend ("coverage = 1 - abs(t - 1);");
            } else {
                SkASSERT(GrFillRule::kNonzero == fillRule);
                // Nonzero: any nonzero count is fully covered; clamp to 1.
                f->codeAppend ("coverage = min(abs(coverage), 1);");
            }
        }

        if (proc.fMustCheckBounds) {
            // Fragments outside the path's bounds get zero coverage.
            f->codeAppend ("} else {");
            f->codeAppend (    "coverage = 0;");
            f->codeAppend ("}");
        }

        if (proc.fClipPath->deviceSpacePath().isInverseFillType()) {
            // Inverse fill: the clip covers everything *outside* the path.
            f->codeAppend ("coverage = 1 - coverage;");
        }

        f->codeAppendf("%s = %s * coverage;", args.fOutputColor, args.fInputColor);
    }

    // Uploads the uniforms declared in emitCode. Must mirror the branches
    // taken there: the bounds uniform only exists when fMustCheckBounds is set.
    void onSetData(const GrGLSLProgramDataManager& pdman,
                   const GrFragmentProcessor& fp) override {
        const GrCCClipProcessor& proc = fp.cast<GrCCClipProcessor>();
        if (proc.fMustCheckBounds) {
            const SkRect pathIBounds = SkRect::Make(proc.fClipPath->pathDevIBounds());
            pdman.set4f(fPathIBoundsUniform, pathIBounds.left(), pathIBounds.top(),
                        pathIBounds.right(), pathIBounds.bottom());
        }
        // Device-space -> atlas-space mapping: (scale.x, scale.y, trans.x, trans.y).
        const SkVector& scale = proc.fClipPath->atlasScale();
        const SkVector& trans = proc.fClipPath->atlasTranslate();
        pdman.set4f(fAtlasTransformUniform, scale.x(), scale.y(), trans.x(), trans.y());
    }

private:
    UniformHandle fPathIBoundsUniform;
    UniformHandle fAtlasTransformUniform;
};
| 137 | |
Chris Dalton | 383a2ef | 2018-01-08 17:21:41 -0500 | [diff] [blame] | 138 | GrGLSLFragmentProcessor* GrCCClipProcessor::onCreateGLSLInstance() const { |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 139 | return new Impl(); |
| 140 | } |