/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include <memory>

#include "include/pathops/SkPathOps.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/ccpr/GrCCClipProcessor.h"

// Returns true if CCPR can run a multisampled clip atlas on these caps: we require shader
// integer and sample-mask support, instanced draws, full 32-bit floats, half-float vertex
// attributes, and a renderable, internally multisampled alpha-8 backend format.
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    GrBackendFormat defaultA8Format = caps.getDefaultBackendFormat(GrColorType::kAlpha_8,
                                                                   GrRenderable::kYes);
    if (caps.driverDisableMSAAClipAtlas() || !shaderCaps.integerSupport() ||
        !caps.drawInstancedSupport() || !shaderCaps.floatIs32Bits() ||
        !defaultA8Format.isValid() || // This checks both texturable and renderable
        !caps.halfFloatVertexAttributeSupport()) {
        return false;
    }

    if (caps.internalMultisampleCount(defaultA8Format) > 1 &&
        caps.sampleLocationsSupport() &&
        shaderCaps.sampleMaskSupport()) {
        return true;
    }

    return false;
}

std::unique_ptr<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps) {
    if (IsSupported(caps)) {
        return std::make_unique<GrCoverageCountingPathRenderer>();
    }
    return nullptr;
}
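
// A minimal usage sketch (hypothetical caller; not part of this file). An owner such as a
// drawing manager would create the renderer once, keep it around for makeClipProcessor()
// calls, and register it to receive the preFlush()/postFlush() callbacks below:
//
//     if (auto ccpr = GrCoverageCountingPathRenderer::CreateIfSupported(*caps)) {
//         onFlushCallbackObjects.push_back(ccpr.get());  // assumed registration list
//         fCCPR = std::move(ccpr);                       // "fCCPR" is hypothetical
//     }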

// Returns the GrCCPerOpsTaskPaths for the given opsTask, creating an empty container on
// first use. Clip paths accumulate here until the next flush.
GrCCPerOpsTaskPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opsTaskID) {
    auto it = fPendingPaths.find(opsTaskID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpsTaskPaths> paths = sk_make_sp<GrCCPerOpsTaskPaths>();
        it = fPendingPaths.insert(std::make_pair(opsTaskID, std::move(paths))).first;
    }
    return it->second.get();
}

// Creates a fragment processor that clips coverage against "deviceSpacePath". The path is
// recorded in the pending map for this opsTask and isn't rendered into an atlas until
// preFlush().
GrFPResult GrCoverageCountingPathRenderer::makeClipProcessor(
        std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
        const SkPath& deviceSpacePath, const SkIRect& accessRect, const GrCaps& caps) {
#ifdef SK_DEBUG
    SkASSERT(!fFlushing);
    SkIRect pathIBounds;
    deviceSpacePath.getBounds().roundOut(&pathIBounds);
    SkIRect maskBounds;
    if (maskBounds.intersect(accessRect, pathIBounds)) {
        SkASSERT(maskBounds.height64() * maskBounds.width64() <= kMaxClipPathArea);
    }
#endif

    if (deviceSpacePath.isEmpty() ||
        !SkIRect::Intersects(accessRect, deviceSpacePath.getBounds().roundOut())) {
        // The accessRect never touches the path.
        return deviceSpacePath.isInverseFillType() ? GrFPSuccess(std::move(inputFP)) // Wide open.
                                                   : GrFPFailure(nullptr); // Clipped out.
    }

    uint32_t key = deviceSpacePath.getGenerationID();
    key = (key << 1) | (uint32_t)GrFillRuleForSkPath(deviceSpacePath);
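    // The key packs the path's generation ID into the upper bits and its fill rule into the
    // low bit. For example (values hypothetical): a path with generation ID 0x2F maps to key
    // 0x5E or 0x5F depending on its fill rule, so identical geometry still gets a separate
    // atlas entry per fill rule.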
    sk_sp<GrCCClipPath>& clipPath = this->lookupPendingPaths(opsTaskID)->fClipPaths[key];
    if (!clipPath) {
        // This is the first time we've accessed this clip path key in the map.
        clipPath = sk_make_sp<GrCCClipPath>(deviceSpacePath, accessRect, caps);
    } else {
        clipPath->addAccess(accessRect);
    }

    auto mustCheckBounds = GrCCClipProcessor::MustCheckBounds(
            !clipPath->pathDevIBounds().contains(accessRect));
    return GrFPSuccess(std::make_unique<GrCCClipProcessor>(std::move(inputFP), caps, clipPath,
                                                           mustCheckBounds));
}
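
// A usage sketch (hypothetical clip implementation; not part of this file). GrFPResult is a
// std::tuple<bool, std::unique_ptr<GrFragmentProcessor>>, so a caller might write:
//
//     auto [success, fp] = ccpr->makeClipProcessor(std::move(inputFP), opsTaskID,
//                                                  deviceSpacePath, accessRect, caps);
//     if (!success) {
//         return false;  // The draw is fully clipped out.
//     }
//     // "fp" is either the unmodified input (wide open) or an atlas-backed clip processor.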

namespace {

// Iterates all GrCCClipPaths in an array of non-empty maps. By convention,
// fPerOpsTaskClipPaths is null while the iterator sits on the first element of a map;
// operator++ lazily fills it in before advancing.
class ClipMapsIter {
public:
    ClipMapsIter(sk_sp<GrCCPerOpsTaskPaths>* mapsList) : fMapsList(mapsList) {}

    bool operator!=(const ClipMapsIter& that) {
        if (fMapsList != that.fMapsList) {
            return true;
        }
        // fPerOpsTaskClipPaths will be null when we are on the first element.
        if (fPerOpsTaskClipPaths != that.fPerOpsTaskClipPaths) {
            return true;
        }
        return fPerOpsTaskClipPaths && fClipPathsIter != that.fClipPathsIter;
    }

    void operator++() {
        // fPerOpsTaskClipPaths is null when we are on the first element.
        if (!fPerOpsTaskClipPaths) {
            fPerOpsTaskClipPaths = &(*fMapsList)->fClipPaths;
            SkASSERT(!fPerOpsTaskClipPaths->empty()); // We don't handle empty lists.
            fClipPathsIter = fPerOpsTaskClipPaths->begin();
        }
        if ((++fClipPathsIter) == fPerOpsTaskClipPaths->end()) {
            ++fMapsList;
            fPerOpsTaskClipPaths = nullptr;
        }
    }

    GrCCClipPath* operator->() {
        // fPerOpsTaskClipPaths is null when we are on the first element.
        const auto& it = (!fPerOpsTaskClipPaths) ? (*fMapsList)->fClipPaths.begin()
                                                 : fClipPathsIter;
        return it->second.get();
    }

private:
    sk_sp<GrCCPerOpsTaskPaths>* fMapsList;
    std::map<uint32_t, sk_sp<GrCCClipPath>>* fPerOpsTaskClipPaths = nullptr;
    std::map<uint32_t, sk_sp<GrCCClipPath>>::iterator fClipPathsIter;
};
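
// For example (mirroring the flush loop below), given a non-empty array "maps" of
// sk_sp<GrCCPerOpsTaskPaths> where every fClipPaths map is non-empty:
//
//     for (ClipMapsIter it(maps.begin()); it != ClipMapsIter(maps.end()); ++it) {
//         it->accountForOwnPath(&specs);  // Visits each GrCCClipPath exactly once.
//     }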

} // namespace

// Assigns "atlasTexture" to every GrCCClipPath in the half-open range
// [nextPathToAssign, end). Does nothing if the texture is null.
static void assign_atlas_textures(GrTexture* atlasTexture, ClipMapsIter nextPathToAssign,
                                  const ClipMapsIter& end) {
    if (!atlasTexture) {
        return;
    }
    for (; nextPathToAssign != end; ++nextPathToAssign) {
        nextPathToAssign->assignAtlasTexture(sk_ref_sp(atlasTexture));
    }
}

// Called at the start of a flush, via the GrOnFlushCallbackObject interface: moves the
// flushing opsTasks' clip paths out of fPendingPaths, lays them out in atlas(es), renders
// the atlas(es), and assigns each GrCCClipPath its backing texture.
void GrCoverageCountingPathRenderer::preFlush(
        GrOnFlushResourceProvider* onFlushRP, SkSpan<const uint32_t> taskIDs) {
    SkASSERT(!fFlushing);
    SkDEBUGCODE(fFlushing = true);

    if (fPendingPaths.empty()) {
        return; // Nothing to draw.
    }

    GrCCAtlas::Specs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fMinTextureSize = std::min(512, maxPreferredRTSize);

    // Move the per-opsTask paths that are about to be flushed from fPendingPaths to
    // flushingPaths, and count them up so we can preallocate buffers.
    SkSTArray<8, sk_sp<GrCCPerOpsTaskPaths>> flushingPaths;
    flushingPaths.reserve_back(taskIDs.count());
    for (uint32_t taskID : taskIDs) {
        auto iter = fPendingPaths.find(taskID);
        if (fPendingPaths.end() == iter) {
            continue; // No paths on this opsTask.
        }

        flushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (const auto& clipsIter : flushingPaths.back()->fClipPaths) {
            clipsIter.second->accountForOwnPath(&specs);
        }
    }

    GrCCPerFlushResources perFlushResources(onFlushRP, specs);

    // Lay out the atlas(es) and render the paths. When a path doesn't fit and the current
    // atlas gets retired, every path from "nextPathToAssign" up to (but not including) the
    // current one was rendered into the retired atlas, so that atlas's texture is assigned
    // to all of them.
    ClipMapsIter it(flushingPaths.begin());
    ClipMapsIter end(flushingPaths.end());
    ClipMapsIter nextPathToAssign = it; // The next GrCCClipPath to call assignAtlasTexture on.
    for (; it != end; ++it) {
        if (auto retiredAtlas = it->renderPathInAtlas(&perFlushResources, onFlushRP)) {
            assign_atlas_textures(retiredAtlas->textureProxy()->peekTexture(), nextPathToAssign,
                                  it);
            nextPathToAssign = it;
        }
    }

    // Allocate resources and then render the final atlas.
    auto atlas = perFlushResources.finalize(onFlushRP);
    assign_atlas_textures(atlas->textureProxy()->peekTexture(), nextPathToAssign, end);
}
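
// Owner-side sketch of the onFlush contract this file relies on (names hypothetical):
//
//     ccpr->preFlush(onFlushRP, taskIDs);  // Atlas render tasks get scheduled here.
//     executeRenderTasks();                // Atlases render before the tasks that sample them.
//     ccpr->postFlush(token, taskIDs);     // Debug bookkeeping only.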

// Called once the flush has completed. Simply resets the debug-only fFlushing flag.
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken,
                                               SkSpan<const uint32_t> /* taskIDs */) {
    SkASSERT(fFlushing);
    SkDEBUGCODE(fFlushing = false);
}