/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include <map>
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrOpsTask.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/ccpr/GrCCPerFlushResources.h"
#include "src/gpu/ccpr/GrCCPerOpsTaskPaths.h"

class GrCCDrawPathsOp;
class GrCCPathCache;

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCCoverageProcessor, GrCCPathProcessor.)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    using CoverageType = GrCCAtlas::CoverageType;

    const char* name() const final { return "CCPR"; }

    static bool IsSupported(const GrCaps&, CoverageType* = nullptr);

    enum class AllowCaching : bool {
        kNo = false,
        kYes = true
    };

    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(
            const GrCaps&, AllowCaching, uint32_t contextUniqueID);
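
    // Rough construction sketch (illustrative only; the real call site lives in Ganesh's
    // path-renderer setup, and "caps" / "contextUniqueID" below are stand-in variables):
    //
    //   CoverageType coverageType;
    //   if (GrCoverageCountingPathRenderer::IsSupported(caps, &coverageType)) {
    //       sk_sp<GrCoverageCountingPathRenderer> ccpr =
    //               GrCoverageCountingPathRenderer::CreateIfSupported(
    //                       caps, AllowCaching::kYes, contextUniqueID);
    //   }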

    CoverageType coverageType() const { return fCoverageType; }

    using PendingPathsMap = std::map<uint32_t, sk_sp<GrCCPerOpsTaskPaths>>;

    // In DDL mode, Ganesh needs to be able to move the pending GrCCPerOpsTaskPaths to the DDL
    // object (detachPendingPaths) and then return them upon replay (mergePendingPaths).
    PendingPathsMap detachPendingPaths() { return std::move(fPendingPaths); }

    void mergePendingPaths(const PendingPathsMap& paths) {
#ifdef SK_DEBUG
        // Ensure there are no duplicate opsTask IDs between the incoming path map and ours.
        // This should always be true since opsTask IDs are globally unique and these are coming
        // from different DDL recordings.
        for (const auto& it : paths) {
            SkASSERT(!fPendingPaths.count(it.first));
        }
#endif

        fPendingPaths.insert(paths.begin(), paths.end());
    }
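
    // Rough DDL round-trip sketch (illustrative only; these calls are made by Ganesh's DDL
    // machinery, and "recorderCCPR" / "replayCCPR" are hypothetical stand-ins):
    //
    //   // When recording finishes, the pending per-opsTask paths move onto the DDL object:
    //   PendingPathsMap recorded = recorderCCPR->detachPendingPaths();
    //
    //   // When the DDL is replayed, they are handed back to the replaying context's CCPR:
    //   replayCCPR->mergePendingPaths(recorded);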

    std::unique_ptr<GrFragmentProcessor> makeClipProcessor(
            std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
            const SkPath& deviceSpacePath, const SkIRect& accessRect, const GrCaps& caps);

    // GrOnFlushCallbackObject overrides.
    void preFlush(GrOnFlushResourceProvider*, SkSpan<const uint32_t> taskIDs) override;
    void postFlush(GrDeferredUploadToken, SkSpan<const uint32_t> taskIDs) override;
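
    // Note: during preFlush(), the pending paths for the flushed opsTasks move from fPendingPaths
    // into fFlushingPaths, and the per-flush GPU buffers and "coverage count atlas" are built
    // (see the member comments below).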

    void purgeCacheEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point&);

    // If a path spans more pixels than this, we need to crop it or else analytic AA can run out
    // of fp32 precision.
    static constexpr float kPathCropThreshold = 1 << 16;

    static void CropPath(const SkPath&, const SkIRect& cropbox, SkPath* out);
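
    // Illustrative sketch of the intended check (hypothetical caller; the actual cropping is
    // performed internally by the CCPR draw ops, and "cropBox" is a stand-in variable):
    //
    //   SkRect bounds = deviceSpacePath.getBounds();
    //   if (bounds.width() > kPathCropThreshold || bounds.height() > kPathCropThreshold) {
    //       SkPath cropped;
    //       CropPath(deviceSpacePath, cropBox, &cropped);
    //       // ... draw "cropped" in place of the original path ...
    //   }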

    // Maximum inflation of path bounds due to stroking (from width, miter, caps). Strokes wider
    // than this will be converted to fill paths and drawn by the CCPR filler instead.
    static constexpr float kMaxBoundsInflationFromStroke = 4096;

    static constexpr int kDoCopiesThreshold = 100;

    static float GetStrokeDevWidth(const SkMatrix&, const SkStrokeRec&,
                                   float* inflationRadius = nullptr);
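
    // Illustrative sketch (hypothetical caller): deciding whether a stroke stays a stroke or
    // falls back to the CCPR filler; "viewMatrix" and "strokeRec" are stand-in variables.
    //
    //   float inflationRadius;
    //   float devStrokeWidth = GetStrokeDevWidth(viewMatrix, strokeRec, &inflationRadius);
    //   if (inflationRadius > kMaxBoundsInflationFromStroke) {
    //       // Too wide to stroke directly; convert to a fill path and draw with the filler.
    //   }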

private:
    GrCoverageCountingPathRenderer(CoverageType, AllowCaching, uint32_t contextUniqueID);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrStyledShape&) const override {
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;
    bool onDrawPath(const DrawPathArgs&) override;

    GrCCPerOpsTaskPaths* lookupPendingPaths(uint32_t opsTaskID);
    void recordOp(GrOp::Owner, const DrawPathArgs&);

    const CoverageType fCoverageType;

    // fPendingPaths holds the GrCCPerOpsTaskPaths objects that have already been created, but not
    // flushed, and those that are still being created. All GrCCPerOpsTaskPaths objects will first
    // reside in fPendingPaths, then be moved to fFlushingPaths during preFlush().
    PendingPathsMap fPendingPaths;

    // fFlushingPaths holds the GrCCPerOpsTaskPaths objects that are currently being flushed.
    // (It will only contain elements when fFlushing is true.)
    SkSTArray<4, sk_sp<GrCCPerOpsTaskPaths>> fFlushingPaths;

    std::unique_ptr<GrCCPathCache> fPathCache;

    SkDEBUGCODE(bool fFlushing = false);

public:
    void testingOnly_drawPathDirectly(const DrawPathArgs&);
    const GrCCPerFlushResources* testingOnly_getCurrentFlushResources();
    const GrCCPathCache* testingOnly_getPathCache() const;
};

#endif