/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include <map>
#include "GrCCPerOpListPaths.h"
#include "GrOnFlushResourceProvider.h"
#include "GrPathRenderer.h"
#include "GrRenderTargetOpList.h"
#include "ccpr/GrCCPerFlushResources.h"

class GrCCDrawPathsOp;
class GrCCPathCache;

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCCoverageProcessor, GrCCPathProcessor.)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    static bool IsSupported(const GrCaps&);

    enum class AllowCaching : bool {
        kNo = false,
        kYes = true
    };

    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(const GrCaps&, AllowCaching);
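
    // A minimal setup sketch, assuming the caller registers the renderer both as a path renderer
    // and as an on-flush callback object (the registration step below is illustrative, not an
    // exact call site):
    //
    //     if (auto ccpr = GrCoverageCountingPathRenderer::CreateIfSupported(caps,
    //                                                                       AllowCaching::kYes)) {
    //         // add ccpr to the path-renderer chain and to the on-flush callback list
    //     }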

    ~GrCoverageCountingPathRenderer() override;

    using PendingPathsMap = std::map<uint32_t, sk_sp<GrCCPerOpListPaths>>;

    // In DDL mode, Ganesh needs to be able to move the pending GrCCPerOpListPaths to the DDL
    // object (detachPendingPaths) and then return them upon replay (mergePendingPaths).
    PendingPathsMap detachPendingPaths() { return std::move(fPendingPaths); }

    void mergePendingPaths(const PendingPathsMap& paths) {
#ifdef SK_DEBUG
        // Ensure there are no duplicate opList IDs between the incoming path map and ours.
        // This should always be true since opList IDs are globally unique and these are coming
        // from different DDL recordings.
        for (const auto& it : paths) {
            SkASSERT(!fPendingPaths.count(it.first));
        }
#endif

        fPendingPaths.insert(paths.begin(), paths.end());
    }
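
    // Sketch of the intended DDL flow described above; "detached" and "ccpr" are illustrative
    // names, not actual Skia symbols:
    //
    //     // While recording into a DDL:
    //     PendingPathsMap detached = ccpr->detachPendingPaths();   // stored on the DDL object
    //     ...
    //     // When that DDL is replayed on the real context:
    //     ccpr->mergePendingPaths(detached);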

    std::unique_ptr<GrFragmentProcessor> makeClipProcessor(uint32_t opListID,
                                                            const SkPath& deviceSpacePath,
                                                            const SkIRect& accessRect, int rtWidth,
                                                            int rtHeight, const GrCaps&);
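
    // Hedged usage sketch: the clip code can ask CCPR for a fragment processor that applies the
    // clip path's coverage to a draw (the variables below are illustrative):
    //
    //     std::unique_ptr<GrFragmentProcessor> clipFP = ccpr->makeClipProcessor(
    //             opListID, deviceSpaceClipPath, accessRect, rtWidth, rtHeight, caps);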

    // GrOnFlushCallbackObject overrides.
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                  SkTArray<sk_sp<GrRenderTargetContext>>* out) override;
    void postFlush(GrDeferredUploadToken, const uint32_t* opListIDs, int numOpListIDs) override;

    void testingOnly_drawPathDirectly(const DrawPathArgs&);
    const GrUniqueKey& testingOnly_getStashedAtlasKey() const;

    // If a path spans more pixels than this, we need to crop it or else analytic AA can run out
    // of fp32 precision.
    static constexpr float kPathCropThreshold = 1 << 16;

    static void CropPath(const SkPath&, const SkIRect& cropbox, SkPath* out);
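
    // Rough sketch of how the threshold and CropPath are meant to be used together (the clip
    // bounds variable is illustrative):
    //
    //     if (path.getBounds().width() > kPathCropThreshold ||
    //         path.getBounds().height() > kPathCropThreshold) {
    //         SkPath cropped;
    //         GrCoverageCountingPathRenderer::CropPath(path, clipIBounds, &cropped);
    //         // draw "cropped" instead of "path"
    //     }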

    // Maximum inflation of path bounds due to stroking (from width, miter, caps). Strokes wider
    // than this will be converted to fill paths and drawn by the CCPR filler instead.
    static constexpr float kMaxBoundsInflationFromStroke = 4096;

    static float GetStrokeDevWidth(const SkMatrix&, const SkStrokeRec&,
                                   float* inflationRadius = nullptr);
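
    // Illustrative sketch of how these two are meant to interact (not an exact call site):
    //
    //     float inflationRadius;
    //     float devWidth = GetStrokeDevWidth(viewMatrix, stroke, &inflationRadius);
    //     if (inflationRadius > kMaxBoundsInflationFromStroke) {
    //         // Stroke is too wide for the CCPR stroker; convert it to a fill path instead.
    //     }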

private:
    GrCoverageCountingPathRenderer(AllowCaching);

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrShape&) const override {
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;
    bool onDrawPath(const DrawPathArgs&) override;

    GrCCPerOpListPaths* lookupPendingPaths(uint32_t opListID);
    void recordOp(std::unique_ptr<GrCCDrawPathsOp>, const DrawPathArgs&);

    // fPendingPaths holds the GrCCPerOpListPaths objects that have already been created, but not
    // flushed, and those that are still being created. All GrCCPerOpListPaths objects will first
    // reside in fPendingPaths, then be moved to fFlushingPaths during preFlush().
    PendingPathsMap fPendingPaths;

    // fFlushingPaths holds the GrCCPerOpListPaths objects that are currently being flushed.
    // (It will only contain elements when fFlushing is true.)
    SkSTArray<4, sk_sp<GrCCPerOpListPaths>> fFlushingPaths;

    std::unique_ptr<GrCCPathCache> fPathCache;
    GrUniqueKey fStashedAtlasKey;

    SkDEBUGCODE(bool fFlushing = false);
};

#endif