/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include <map>
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrOpsTask.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/ccpr/GrCCPerFlushResources.h"
#include "src/gpu/ccpr/GrCCPerOpsTaskPaths.h"

class GrCCDrawPathsOp;
class GrCCPathCache;

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCCoverageProcessor, GrCCPathProcessor.)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    using CoverageType = GrCCAtlas::CoverageType;

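    // Reports whether CCPR is supported by the given caps and, if so and the optional out param
    // is non-null, which CoverageType it would use.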
    static bool IsSupported(const GrCaps&, CoverageType* = nullptr);

    enum class AllowCaching : bool {
        kNo = false,
        kYes = true
    };

    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(
            const GrCaps&, AllowCaching, uint32_t contextUniqueID);

    CoverageType coverageType() const { return fCoverageType; }

    using PendingPathsMap = std::map<uint32_t, sk_sp<GrCCPerOpsTaskPaths>>;

    // In DDL mode, Ganesh needs to be able to move the pending GrCCPerOpsTaskPaths to the DDL
    // object (detachPendingPaths) and then return them upon replay (mergePendingPaths).
    PendingPathsMap detachPendingPaths() { return std::move(fPendingPaths); }

    void mergePendingPaths(const PendingPathsMap& paths) {
#ifdef SK_DEBUG
        // Ensure there are no duplicate opsTask IDs between the incoming path map and ours.
        // This should always be true since opsTask IDs are globally unique and these are coming
        // from different DDL recordings.
        for (const auto& it : paths) {
            SkASSERT(!fPendingPaths.count(it.first));
        }
#endif

        fPendingPaths.insert(paths.begin(), paths.end());
    }

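    // Returns a fragment processor that applies clipping against the given device-space path,
    // limited to accessRect. The clip path's coverage is rendered into the atlas at flush time.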
    std::unique_ptr<GrFragmentProcessor> makeClipProcessor(
            uint32_t oplistID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
            const GrCaps&);

    // GrOnFlushCallbackObject overrides.
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opsTaskIDs,
                  int numOpsTaskIDs) override;
    void postFlush(GrDeferredUploadToken, const uint32_t* opsTaskIDs, int numOpsTaskIDs) override;

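    // Evicts cached path entries that have not been used since the given time point.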
    void purgeCacheEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point&);

    // If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
    // fp32 precision.
    static constexpr float kPathCropThreshold = 1 << 16;

    static void CropPath(const SkPath&, const SkIRect& cropbox, SkPath* out);

    // Maximum inflation of path bounds due to stroking (from width, miter, caps). Strokes wider
    // than this will be converted to fill paths and drawn by the CCPR filler instead.
    static constexpr float kMaxBoundsInflationFromStroke = 4096;

    static float GetStrokeDevWidth(const SkMatrix&, const SkStrokeRec&,
                                   float* inflationRadius = nullptr);

private:
    GrCoverageCountingPathRenderer(CoverageType, AllowCaching, uint32_t contextUniqueID);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrShape&) const override {
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;
    bool onDrawPath(const DrawPathArgs&) override;

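    // Finds (or creates) the pending GrCCPerOpsTaskPaths entry for the given opsTask ID.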
    GrCCPerOpsTaskPaths* lookupPendingPaths(uint32_t opsTaskID);
    void recordOp(std::unique_ptr<GrCCDrawPathsOp>, const DrawPathArgs&);

    const CoverageType fCoverageType;

    // fPendingPaths holds the GrCCPerOpsTaskPaths objects that have already been created, but not
    // flushed, and those that are still being created. All GrCCPerOpsTaskPaths objects will first
    // reside in fPendingPaths, then be moved to fFlushingPaths during preFlush().
    PendingPathsMap fPendingPaths;

    // fFlushingPaths holds the GrCCPerOpsTaskPaths objects that are currently being flushed.
    // (It will only contain elements when fFlushing is true.)
    SkSTArray<4, sk_sp<GrCCPerOpsTaskPaths>> fFlushingPaths;

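    // Only non-null when caching was enabled at creation time (AllowCaching::kYes).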
    std::unique_ptr<GrCCPathCache> fPathCache;

    SkDEBUGCODE(bool fFlushing = false);

public:
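    // Testing-only helpers.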
    void testingOnly_drawPathDirectly(const DrawPathArgs&);
    const GrCCPerFlushResources* testingOnly_getCurrentFlushResources();
    const GrCCPathCache* testingOnly_getPathCache() const;
};

#endif