blob: 34fcfc04ccc9aad0a7565760afa74e361062a07f [file] [log] [blame]
/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

Chris Dalton0a22b1e2020-03-26 11:52:15 -06008#include "src/gpu/tessellate/GrTessellationPathRenderer.h"
Chris Daltonb832ce62020-01-06 19:49:37 -07009
Chris Daltond2dc8dd2020-05-19 16:32:02 -060010#include "src/core/SkIPoint16.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070011#include "src/core/SkPathPriv.h"
12#include "src/gpu/GrClip.h"
13#include "src/gpu/GrMemoryPool.h"
14#include "src/gpu/GrRecordingContextPriv.h"
15#include "src/gpu/GrRenderTargetContext.h"
Chris Daltonc3b67eb2020-02-10 21:09:58 -070016#include "src/gpu/GrSurfaceContextPriv.h"
Michael Ludwig2686d692020-04-17 20:21:37 +000017#include "src/gpu/geometry/GrStyledShape.h"
Chris Daltonc3b67eb2020-02-10 21:09:58 -070018#include "src/gpu/ops/GrFillRectOp.h"
Chris Dalton4e998532020-02-10 11:06:42 -070019#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070020#include "src/gpu/tessellate/GrTessellatePathOp.h"
21
// Starting dimensions of the atlas; the atlas is reset back to this size after each flush.
constexpr static SkISize kAtlasInitialSize{512, 512};
// Upper bound on atlas dimensions (further limited by caps.maxPreferredRenderTargetSize()).
constexpr static int kMaxAtlasSize = 2048;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;
32
Chris Dalton0a22b1e2020-03-26 11:52:15 -060033GrTessellationPathRenderer::GrTessellationPathRenderer(const GrCaps& caps) : fAtlas(
Chris Dalton4e998532020-02-10 11:06:42 -070034 GrColorType::kAlpha_8, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
Chris Daltond2dc8dd2020-05-19 16:32:02 -060035 std::min(kMaxAtlasSize, caps.maxPreferredRenderTargetSize()), caps, kAtlasAlgorithm) {
Chris Dalton4e998532020-02-10 11:06:42 -070036}
37
Chris Dalton0a22b1e2020-03-26 11:52:15 -060038GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
Chris Daltonb832ce62020-01-06 19:49:37 -070039 const CanDrawPathArgs& args) const {
Chris Dalton0f6bb8a2020-01-15 09:40:54 -070040 if (!args.fShape->style().isSimpleFill() || args.fShape->inverseFilled() ||
41 args.fViewMatrix->hasPerspective()) {
Chris Daltonb832ce62020-01-06 19:49:37 -070042 return CanDrawPath::kNo;
43 }
44 if (GrAAType::kCoverage == args.fAAType) {
45 SkASSERT(1 == args.fProxy->numSamples());
46 if (!args.fProxy->canUseMixedSamples(*args.fCaps)) {
47 return CanDrawPath::kNo;
48 }
49 }
50 SkPath path;
51 args.fShape->asPath(&path);
52 if (SkPathPriv::ConicWeightCnt(path)) {
53 return CanDrawPath::kNo;
54 }
55 return CanDrawPath::kYes;
56}
57
Chris Dalton0a22b1e2020-03-26 11:52:15 -060058bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
Chris Dalton4e998532020-02-10 11:06:42 -070059 GrRenderTargetContext* renderTargetContext = args.fRenderTargetContext;
60 GrOpMemoryPool* pool = args.fContext->priv().opMemoryPool();
Chris Daltonb832ce62020-01-06 19:49:37 -070061 SkPath path;
62 args.fShape->asPath(&path);
63
Chris Dalton4e998532020-02-10 11:06:42 -070064 // See if the path is small and simple enough to atlas instead of drawing directly.
65 //
66 // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
67 // render the sample mask to an integer texture, but such a scheme would probably require
68 // GL_EXT_post_depth_coverage, which appears to have low adoption.
69 SkIRect devIBounds;
Chris Daltond2dc8dd2020-05-19 16:32:02 -060070 SkIPoint16 locationInAtlas;
71 bool transposedInAtlas;
Chris Dalton4e998532020-02-10 11:06:42 -070072 if (this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path,
Chris Daltond2dc8dd2020-05-19 16:32:02 -060073 args.fAAType, &devIBounds, &locationInAtlas, &transposedInAtlas)) {
Chris Dalton4e998532020-02-10 11:06:42 -070074 auto op = pool->allocate<GrDrawAtlasPathOp>(
75 renderTargetContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
Chris Daltond2dc8dd2020-05-19 16:32:02 -060076 devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
Michael Ludwig7c12e282020-05-29 09:54:07 -040077 std::move(args.fPaint));
78 renderTargetContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton4e998532020-02-10 11:06:42 -070079 return true;
80 }
Chris Daltonb832ce62020-01-06 19:49:37 -070081
Chris Dalton4e998532020-02-10 11:06:42 -070082 auto op = pool->allocate<GrTessellatePathOp>(
83 *args.fViewMatrix, path, std::move(args.fPaint), args.fAAType);
Michael Ludwig7c12e282020-05-29 09:54:07 -040084 renderTargetContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton4e998532020-02-10 11:06:42 -070085 return true;
86}
87
Chris Dalton0a22b1e2020-03-26 11:52:15 -060088bool GrTessellationPathRenderer::tryAddPathToAtlas(
Chris Dalton4e998532020-02-10 11:06:42 -070089 const GrCaps& caps, const SkMatrix& viewMatrix, const SkPath& path, GrAAType aaType,
Chris Daltond2dc8dd2020-05-19 16:32:02 -060090 SkIRect* devIBounds, SkIPoint16* locationInAtlas, bool* transposedInAtlas) {
Chris Dalton4e998532020-02-10 11:06:42 -070091 if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
92 return false;
93 }
94
Chris Daltond2dc8dd2020-05-19 16:32:02 -060095 // Atlas paths require their points to be transformed on the CPU and copied into an "uber path".
96 // Check if this path has too many points to justify this extra work.
97 if (path.countPoints() > 200) {
Chris Dalton4e998532020-02-10 11:06:42 -070098 return false;
99 }
100
Chris Dalton4e998532020-02-10 11:06:42 -0700101 SkRect devBounds;
102 viewMatrix.mapRect(&devBounds, path.getBounds());
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600103 devBounds.roundOut(devIBounds);
104
105 // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
106 // guarantees that every atlas entry has a small height, which lends very well to efficient pow2
107 // atlas packing.
108 int maxDimenstion = devIBounds->width();
109 int minDimension = devIBounds->height();
110 *transposedInAtlas = minDimension > maxDimenstion;
111 if (*transposedInAtlas) {
112 std::swap(minDimension, maxDimenstion);
113 }
114
115 // Check if the path is too large for an atlas. Since we use "minDimension" for height in the
116 // atlas, limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= kMaxAtlasPathHeight.
117 if (maxDimenstion * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
118 maxDimenstion > kMaxAtlasSize / 2) {
Chris Dalton4e998532020-02-10 11:06:42 -0700119 return false;
120 }
121
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600122 if (!fAtlas.addRect(maxDimenstion, minDimension, locationInAtlas)) {
Chris Dalton4e998532020-02-10 11:06:42 -0700123 return false;
124 }
125
126 SkMatrix atlasMatrix = viewMatrix;
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600127 if (*transposedInAtlas) {
128 std::swap(atlasMatrix[0], atlasMatrix[3]);
129 std::swap(atlasMatrix[1], atlasMatrix[4]);
130 float tx=atlasMatrix.getTranslateX(), ty=atlasMatrix.getTranslateY();
131 atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
132 atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
133 } else {
134 atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
135 locationInAtlas->y() - devIBounds->y());
136 }
Chris Dalton4e998532020-02-10 11:06:42 -0700137
138 // Concatenate this path onto our uber path that matches its fill and AA types.
139 SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600140 uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y()); // Implicit moveTo(0,0).
Chris Dalton4e998532020-02-10 11:06:42 -0700141 uberPath->addPath(path, atlasMatrix);
Chris Daltonb832ce62020-01-06 19:49:37 -0700142 return true;
143}
144
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600145void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
Chris Daltonb832ce62020-01-06 19:49:37 -0700146 SkPath path;
147 args.fShape->asPath(&path);
148
149 GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
150
Chris Daltonf9aea7f2020-01-21 11:19:26 -0700151 auto op = args.fContext->priv().opMemoryPool()->allocate<GrTessellatePathOp>(
152 *args.fViewMatrix, path, GrPaint(), aaType, GrTessellatePathOp::Flags::kStencilOnly);
Michael Ludwig7c12e282020-05-29 09:54:07 -0400153 args.fRenderTargetContext->addDrawOp(args.fClip, std::move(op));
Chris Daltonb832ce62020-01-06 19:49:37 -0700154}
Chris Dalton4e998532020-02-10 11:06:42 -0700155
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600156void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
157 const uint32_t* opsTaskIDs, int numOpsTaskIDs) {
Chris Dalton4e998532020-02-10 11:06:42 -0700158 if (!fAtlas.drawBounds().isEmpty()) {
159 this->renderAtlas(onFlushRP);
160 fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
161 }
162 for (SkPath& path : fAtlasUberPaths) {
163 path.reset();
164 }
165}
166
// Cover-draw stencil settings: pass wherever the stencil value is nonzero, and leave the
// stencil buffer untouched.
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

// Same nonzero test, but zeroes the stencil value on pass so the buffer is left reset for
// subsequent use.
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());
184
// Renders the atlas for this flush: stencils every accumulated uber path into the atlas render
// target, then draws one full-atlas rect with a stencil test to convert the stencilled regions
// into alpha coverage. No-ops if the atlas fails to instantiate.
void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        // Could not instantiate the atlas render target; nothing to render.
        return;
    }

    // Add ops to stencil the atlas paths. One stencil-only op per non-empty (AA, fill-type)
    // uber-path combination.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = onFlushRP->opMemoryPool()->allocate<GrTessellatePathOp>(
                    SkMatrix::I(), *uberPath, GrPaint(), aaType,
                    GrTessellatePathOp::Flags::kStencilOnly);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    auto fillRectFlags = GrFillRectOp::InputFlags::kNone;

    // This will be the final op in the renderTargetContext. So if Ganesh is planning to discard the
    // stencil values anyway, then we might not actually need to reset the stencil values back to 0.
    bool mustResetStencil = !onFlushRP->caps()->discardStencilValuesAfterRenderPass();

    if (rtc->numSamples() <= 1) {
        // We are mixed sampled. We need to enable conservative raster and ensure stencil values get
        // reset in order to avoid artifacts along the diagonal of the atlas.
        fillRectFlags |= GrFillRectOp::InputFlags::kConservativeRaster;
        mustResetStencil = true;
    }

    SkRect coverRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    const GrUserStencilSettings* stencil;
    if (mustResetStencil) {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        coverRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    } else {
        stencil = &kTestStencil;
    }

    GrQuad coverQuad(coverRect);
    DrawQuad drawQuad{coverQuad, coverQuad, GrQuadAAFlags::kAll};

    // White paint: the cover draw writes full coverage wherever the stencil test passes.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);

    auto coverOp = GrFillRectOp::Make(rtc->surfPriv().getContext(), std::move(paint),
                                      GrAAType::kMSAA, &drawQuad, stencil, fillRectFlags);
    rtc->addDrawOp(nullptr, std::move(coverOp));

    // Schedule an explicit MSAA resolve if the backend requires one for this texture.
    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}
245}