/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include <memory>

#include "include/pathops/SkPathOps.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/ccpr/GrCCClipProcessor.h"

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    GrBackendFormat defaultA8Format = caps.getDefaultBackendFormat(GrColorType::kAlpha_8,
                                                                   GrRenderable::kYes);
    if (caps.driverDisableCCPR() || !shaderCaps.integerSupport() ||
        !caps.drawInstancedSupport() || !shaderCaps.floatIs32Bits() ||
        !defaultA8Format.isValid() || // This checks both texturable and renderable
        !caps.halfFloatVertexAttributeSupport()) {
        return false;
    }

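    // The MSAA implementation additionally requires a multisampleable A8 format, known
    // sample locations, and shader sample-mask support, and may be disabled by driver
    // workarounds. The checks below are the source of truth.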
    if (!caps.driverDisableMSAACCPR() &&
        caps.internalMultisampleCount(defaultA8Format) > 1 &&
        caps.sampleLocationsSupport() &&
        shaderCaps.sampleMaskSupport()) {
        return true;
    }

    return false;
}

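// Factory helper: returns a renderer only when the caps allow it. A minimal,
// hypothetical usage sketch (assuming a GrCaps reference "caps" obtained from the
// context):
//
//   if (auto ccpr = GrCoverageCountingPathRenderer::CreateIfSupported(caps)) {
//       // Register ccpr with the path-renderer chain.
//   }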
std::unique_ptr<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps) {
    if (IsSupported(caps)) {
        return std::make_unique<GrCoverageCountingPathRenderer>();
    }
    return nullptr;
}

GrCCPerOpsTaskPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opsTaskID) {
    auto it = fPendingPaths.find(opsTaskID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpsTaskPaths> paths = sk_make_sp<GrCCPerOpsTaskPaths>();
        it = fPendingPaths.insert(std::make_pair(opsTaskID, std::move(paths))).first;
    }
    return it->second.get();
}

std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
        const SkPath& deviceSpacePath, const SkIRect& accessRect, const GrCaps& caps) {
#ifdef SK_DEBUG
    SkASSERT(!fFlushing);
    SkIRect pathIBounds;
    deviceSpacePath.getBounds().roundOut(&pathIBounds);
    SkIRect maskBounds;
    if (maskBounds.intersect(accessRect, pathIBounds)) {
        SkASSERT(maskBounds.height64() * maskBounds.width64() <= kMaxClipPathArea);
    }
#endif

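    // Key the clip path on its generation ID, with the fill rule packed into the low
    // bit so the same geometry drawn with winding vs. even-odd fill gets a distinct
    // entry.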
    uint32_t key = deviceSpacePath.getGenerationID();
    key = (key << 1) | (uint32_t)GrFillRuleForSkPath(deviceSpacePath);
    GrCCClipPath& clipPath = this->lookupPendingPaths(opsTaskID)->fClipPaths[key];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (std::max(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

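    // If the access rect is not fully contained in the path's device bounds, the clip
    // processor must also check bounds when reading coverage.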
    auto mustCheckBounds = GrCCClipProcessor::MustCheckBounds(
            !clipPath.pathDevIBounds().contains(accessRect));
    return std::make_unique<GrCCClipProcessor>(std::move(inputFP), caps, &clipPath,
                                               mustCheckBounds);
}

void GrCoverageCountingPathRenderer::preFlush(
        GrOnFlushResourceProvider* onFlushRP, SkSpan<const uint32_t> taskIDs) {
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPendingPaths.empty()) {
        return;  // Nothing to draw.
    }

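    // Size the atlas: prefer the caps' preferred render target size, and don't start
    // textures below 512 (clamped to the preferred size on smaller GPUs).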
    GrCCAtlas::Specs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fMinTextureSize = std::min(512, maxPreferredRTSize);

    // Move the per-opsTask paths that are about to be flushed from fPendingPaths to
    // fFlushingPaths, and count them up so we can preallocate buffers.
    fFlushingPaths.reserve_back(taskIDs.count());
    for (uint32_t taskID : taskIDs) {
        auto iter = fPendingPaths.find(taskID);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opsTask.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }

    fPerFlushResources = std::make_unique<GrCCPerFlushResources>(onFlushRP, specs);

    // Lay out the atlas(es) and render the paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(fPerFlushResources.get(), onFlushRP);
        }
    }

    // Allocate resources and then render the atlas(es).
    fPerFlushResources->finalize(onFlushRP);
}

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken,
                                               SkSpan<const uint32_t> /* taskIDs */) {
    SkASSERT(fFlushing);

    fPerFlushResources.reset();

    if (!fFlushingPaths.empty()) {
        // We wait to erase these until after flush, once Ops and FPs are done accessing
        // their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

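// Intersects "path" with "cropbox" via SkPathOps so that very large device-space paths
// stay within fp32 precision limits for analytic AA.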
void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}