/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include <memory>

#include "include/pathops/SkPathOps.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/ccpr/GrCCClipProcessor.h"

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    GrBackendFormat defaultA8Format = caps.getDefaultBackendFormat(GrColorType::kAlpha_8,
                                                                   GrRenderable::kYes);
    if (caps.driverDisableCCPR() || !shaderCaps.integerSupport() ||
        !caps.drawInstancedSupport() || !shaderCaps.floatIs32Bits() ||
        !defaultA8Format.isValid() || // This checks both texturable and renderable
        !caps.halfFloatVertexAttributeSupport()) {
        return false;
    }

    if (!caps.driverDisableMSAACCPR() &&
        caps.internalMultisampleCount(defaultA8Format) > 1 &&
        caps.sampleLocationsSupport() &&
        shaderCaps.sampleMaskSupport()) {
        return true;
    }

    return false;
}

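// Factory: returns a new CCPR instance when IsSupported() passes, or nullptr otherwise.
// Illustrative caller sketch (not code from this file; the registration hook and ownership
// details below are assumptions about the owning drawing manager):
//
//   fCCPR = GrCoverageCountingPathRenderer::CreateIfSupported(*this->caps());
//   if (fCCPR) {
//       this->addOnFlushCallbackObject(fCCPR.get());  // hypothetical registration hook
//   }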
std::unique_ptr<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps) {
    if (IsSupported(caps)) {
        return std::make_unique<GrCoverageCountingPathRenderer>();
    }
    return nullptr;
}

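// Returns the GrCCPerOpsTaskPaths entry for the given opsTask ID, lazily creating one the first
// time that opsTask accumulates CCPR paths. Entries remain in fPendingPaths until preFlush()
// moves them into fFlushingPaths.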
GrCCPerOpsTaskPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opsTaskID) {
    auto it = fPendingPaths.find(opsTaskID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpsTaskPaths> paths = sk_make_sp<GrCCPerOpsTaskPaths>();
        it = fPendingPaths.insert(std::make_pair(opsTaskID, std::move(paths))).first;
    }
    return it->second.get();
}

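// Creates a fragment processor that clips against |deviceSpacePath|. The path is registered with
// (or re-used from, keyed on its generation ID and fill rule) the pending paths for |opsTaskID|,
// so its coverage mask gets rendered into an atlas at flush time.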
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
        const SkPath& deviceSpacePath, const SkIRect& accessRect, const GrCaps& caps) {
#ifdef SK_DEBUG
    SkASSERT(!fFlushing);
    SkIRect pathIBounds;
    deviceSpacePath.getBounds().roundOut(&pathIBounds);
    SkIRect maskBounds;
    if (maskBounds.intersect(accessRect, pathIBounds)) {
        SkASSERT(maskBounds.height64() * maskBounds.width64() <= kMaxClipPathArea);
    }
#endif

    // Key each clip path on the SkPath's generation ID, with the fill rule packed into the low bit.
    uint32_t key = deviceSpacePath.getGenerationID();
    key = (key << 1) | (uint32_t)GrFillRuleForSkPath(deviceSpacePath);
    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opsTaskID)->fClipPaths[key];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (std::max(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    auto mustCheckBounds = GrCCClipProcessor::MustCheckBounds(
            !clipPath.pathDevIBounds().contains(accessRect));
    return std::make_unique<GrCCClipProcessor>(std::move(inputFP), caps, &clipPath,
                                               mustCheckBounds);
}

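// Invoked by the drawing manager at flush time (the GrOnFlushCallbackObject hook), before the
// flushed opsTasks execute: gathers the clip paths recorded for those tasks, allocates per-flush
// resources, and renders each path's coverage mask into the atlas(es).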
void GrCoverageCountingPathRenderer::preFlush(
        GrOnFlushResourceProvider* onFlushRP, SkSpan<const uint32_t> taskIDs) {
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPendingPaths.empty()) {
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = std::min(512, maxPreferredRTSize);

    // Move the per-opsTask paths that are about to be flushed from fPendingPaths to
    // fFlushingPaths, and count them up so we can preallocate buffers.
    fFlushingPaths.reserve_back(taskIDs.count());
    for (uint32_t taskID : taskIDs) {
        auto iter = fPendingPaths.find(taskID);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opsTask.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }

    if (specs.isEmpty()) {
        return;  // Nothing to draw.
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP)) {
        return;
    }

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken,
                                               SkSpan<const uint32_t> /* taskIDs */) {
    SkASSERT(fFlushing);

    if (!fFlushingPaths.empty()) {
        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

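// Intersects |path| with |cropbox| using SkPathOps. On failure (e.g. NaN or infinite coordinates)
// |out| is reset to an empty path; the result is marked volatile either way.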
void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}