blob: 3e911b38c20d438c98d473d416ae89c6ab9513f5 [file] [log] [blame]
Chris Daltonb832ce62020-01-06 19:49:37 -07001/*
2 * Copyright 2019 Google LLC.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "src/gpu/tessellate/GrGpuTessellationPathRenderer.h"
9
10#include "src/core/SkPathPriv.h"
11#include "src/gpu/GrClip.h"
12#include "src/gpu/GrMemoryPool.h"
13#include "src/gpu/GrRecordingContextPriv.h"
14#include "src/gpu/GrRenderTargetContext.h"
Chris Dalton4e998532020-02-10 11:06:42 -070015#include "src/gpu/GrRenderTargetContextPriv.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070016#include "src/gpu/geometry/GrShape.h"
Chris Dalton4e998532020-02-10 11:06:42 -070017#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070018#include "src/gpu/tessellate/GrTessellatePathOp.h"
19
// Starting dimensions for the coverage atlas (it can grow on demand), and a hard
// ceiling on its size; the ceiling is further clamped to the caps' preferred
// render target size in the constructor below.
constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;
22
// Sets up the alpha8 coverage atlas. The atlas is internally multisampled so the
// paths stencilled into it are antialiased even though its final format is A8,
// and its maximum size respects both our hard cap and the device's preference.
GrGpuTessellationPathRenderer::GrGpuTessellationPathRenderer(const GrCaps& caps) : fAtlas(
        GrColorType::kAlpha_8, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
        std::min(kMaxAtlasSize, caps.maxPreferredRenderTargetSize()), caps) {
}
27
Chris Daltonb832ce62020-01-06 19:49:37 -070028GrPathRenderer::CanDrawPath GrGpuTessellationPathRenderer::onCanDrawPath(
29 const CanDrawPathArgs& args) const {
30 // This class should not have been added to the chain without tessellation support.
31 SkASSERT(args.fCaps->shaderCaps()->tessellationSupport());
Chris Dalton0f6bb8a2020-01-15 09:40:54 -070032 if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
33 args.fViewMatrix->hasPerspective()) {
Chris Daltonb832ce62020-01-06 19:49:37 -070034 return CanDrawPath::kNo;
35 }
36 if (GrAAType::kCoverage == args.fAAType) {
37 SkASSERT(1 == args.fProxy->numSamples());
38 if (!args.fProxy->canUseMixedSamples(*args.fCaps)) {
39 return CanDrawPath::kNo;
40 }
41 }
42 SkPath path;
43 args.fShape->asPath(&path);
44 if (SkPathPriv::ConicWeightCnt(path)) {
45 return CanDrawPath::kNo;
46 }
47 return CanDrawPath::kYes;
48}
49
50bool GrGpuTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
Chris Dalton4e998532020-02-10 11:06:42 -070051 GrRenderTargetContext* renderTargetContext = args.fRenderTargetContext;
52 GrOpMemoryPool* pool = args.fContext->priv().opMemoryPool();
Chris Daltonb832ce62020-01-06 19:49:37 -070053 SkPath path;
54 args.fShape->asPath(&path);
55
Chris Dalton4e998532020-02-10 11:06:42 -070056 // See if the path is small and simple enough to atlas instead of drawing directly.
57 //
58 // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
59 // render the sample mask to an integer texture, but such a scheme would probably require
60 // GL_EXT_post_depth_coverage, which appears to have low adoption.
61 SkIRect devIBounds;
62 SkIVector devToAtlasOffset;
63 if (this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path,
64 args.fAAType, &devIBounds, &devToAtlasOffset)) {
65 auto op = pool->allocate<GrDrawAtlasPathOp>(
66 renderTargetContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
67 devIBounds, devToAtlasOffset, *args.fViewMatrix, std::move(args.fPaint));
68 renderTargetContext->addDrawOp(*args.fClip, std::move(op));
69 return true;
70 }
Chris Daltonb832ce62020-01-06 19:49:37 -070071
Chris Dalton4e998532020-02-10 11:06:42 -070072 auto op = pool->allocate<GrTessellatePathOp>(
73 *args.fViewMatrix, path, std::move(args.fPaint), args.fAAType);
74 renderTargetContext->addDrawOp(*args.fClip, std::move(op));
75 return true;
76}
77
78bool GrGpuTessellationPathRenderer::tryAddPathToAtlas(
79 const GrCaps& caps, const SkMatrix& viewMatrix, const SkPath& path, GrAAType aaType,
80 SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
81 if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
82 return false;
83 }
84
85 // Atlas paths require their points to be transformed on CPU. Check if the path has too many
86 // points to justify this CPU transformation.
87 if (path.countPoints() > 150) {
88 return false;
89 }
90
91 // Check if the path is too large for an atlas.
92 SkRect devBounds;
93 viewMatrix.mapRect(&devBounds, path.getBounds());
94 if (devBounds.height() * devBounds.width() > 100 * 100 ||
95 std::max(devBounds.height(), devBounds.width()) > kMaxAtlasSize / 2) {
96 return false;
97 }
98
99 devBounds.roundOut(devIBounds);
100 if (!fAtlas.addRect(*devIBounds, devToAtlasOffset)) {
101 return false;
102 }
103
104 SkMatrix atlasMatrix = viewMatrix;
105 atlasMatrix.postTranslate(devToAtlasOffset->x(), devToAtlasOffset->y());
106
107 // Concatenate this path onto our uber path that matches its fill and AA types.
108 SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
109 uberPath->moveTo(devToAtlasOffset->x(), devToAtlasOffset->y()); // Implicit moveTo(0,0).
110 uberPath->addPath(path, atlasMatrix);
Chris Daltonb832ce62020-01-06 19:49:37 -0700111 return true;
112}
113
114void GrGpuTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
115 SkPath path;
116 args.fShape->asPath(&path);
117
118 GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
119
Chris Daltonf9aea7f2020-01-21 11:19:26 -0700120 auto op = args.fContext->priv().opMemoryPool()->allocate<GrTessellatePathOp>(
121 *args.fViewMatrix, path, GrPaint(), aaType, GrTessellatePathOp::Flags::kStencilOnly);
122 args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op));
Chris Daltonb832ce62020-01-06 19:49:37 -0700123}
Chris Dalton4e998532020-02-10 11:06:42 -0700124
125void GrGpuTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
126 const uint32_t* opsTaskIDs, int numOpsTaskIDs) {
127 if (!fAtlas.drawBounds().isEmpty()) {
128 this->renderAtlas(onFlushRP);
129 fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
130 }
131 for (SkPath& path : fAtlasUberPaths) {
132 path.reset();
133 }
134}
135
// Passes wherever the stencil value is nonzero (i.e. wherever a path wound the
// pixel), and leaves the stencil buffer untouched.
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

// Same nonzero test, but zeroes the stencil value on pass so the buffer is left
// clean for subsequent rendering.
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());
153
// Renders every uber path into the atlas: stencil each fill-type/AA combination
// with stencil-only tessellation ops, then convert stencil winding into alpha
// coverage with one full-atlas rect draw, and finally schedule an MSAA resolve
// if the atlas proxy requires a manual one.
void GrGpuTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;  // Atlas could not be instantiated; nothing to render.
    }

    // Add ops to stencil the atlas paths. One op per non-empty uber path; each
    // uber path holds every atlased path sharing a given fill type and AA mode.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = onFlushRP->opMemoryPool()->allocate<GrTessellatePathOp>(
                    SkMatrix::I(), *uberPath, GrPaint(), aaType,
                    GrTessellatePathOp::Flags::kStencilOnly);
            rtc->addDrawOp(GrNoClip(), std::move(op));
        }
    }

    // The next draw will be the final op in the renderTargetContext. So if Ganesh is planning
    // to discard the stencil values anyway, then we might not actually need to reset the
    // stencil values back to zero.
    bool mustResetStencil = !onFlushRP->caps()->discardStencilValuesAfterRenderPass() ||
                            rtc->numSamples() <= 1;  // Need a stencil reset for mixed samples.

    // Draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);
    SkRect drawRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    rtc->priv().stencilRect(GrNoClip(), (mustResetStencil) ? &kTestAndResetStencil : &kTestStencil,
                            std::move(paint), GrAA::kYes, SkMatrix::I(), drawRect, nullptr);

    // NOTE(review): presumably the internally-multisampled atlas must be resolved
    // before dependent draws sample it as a texture — confirm the resolve task is
    // sequenced before those draws by the flush machinery.
    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}
193}