/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCDrawPathsOp_DEFINED
#define GrCCDrawPathsOp_DEFINED

#include "GrShape.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCSTLList.h"
#include "ops/GrDrawOp.h"

struct GrCCPerFlushResourceSpecs;
struct GrCCPerOpListPaths;
class GrCCAtlas;
class GrOnFlushResourceProvider;
class GrCCPathCache;
class GrCCPathCacheEntry;
class GrCCPerFlushResources;

/**
 * This is the Op that draws paths to the actual canvas, using atlases generated by CCPR.
 */
class GrCCDrawPathsOp : public GrDrawOp {
public:
    DEFINE_OP_CLASS_ID
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCDrawPathsOp);

    static std::unique_ptr<GrCCDrawPathsOp> Make(GrContext*, const SkIRect& clipIBounds,
                                                 const SkMatrix&, const GrShape&, GrPaint&&);
    ~GrCCDrawPathsOp() override;

    const char* name() const override { return "GrCCDrawPathsOp"; }
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    RequiresDstTexture finalize(const GrCaps&, const GrAppliedClip*) override;
    CombineResult onCombineIfPossible(GrOp*, const GrCaps&) override;
    void visitProxies(const VisitProxyFunc& fn, VisitorType) const override {
        fProcessors.visitProxies(fn);
    }
    void onPrepare(GrOpFlushState*) override {}

    void addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths);

    // Makes decisions about how to draw each path (cached, copied, rendered, etc.), and
    // increments/fills out the corresponding GrCCPerFlushResourceSpecs. 'stashedAtlasKey', if
    // valid, references the mainline coverage count atlas from the previous flush. Paths found in
    // this atlas will be copied to more permanent atlases in the resource cache.
    void accountForOwnPaths(GrCCPathCache*, GrOnFlushResourceProvider*,
                            const GrUniqueKey& stashedAtlasKey, GrCCPerFlushResourceSpecs*);

    // Allows the caller to decide whether to copy paths out of the stashed atlas and into the
    // resource cache, or to just re-render the paths from scratch. If there aren't many copies or
    // the copies would only fill a small atlas, it's probably best to just re-render.
    enum class DoCopiesToCache : bool {
        kNo = false,
        kYes = true
    };

    // Allocates the GPU resources indicated by accountForOwnPaths(), in preparation for drawing. If
    // DoCopiesToCache is kNo, the paths slated for copy will instead be re-rendered from scratch.
    //
    // NOTE: If using DoCopiesToCache::kNo, it is the caller's responsibility to call
    // convertCopiesToRenders() on the GrCCPerFlushResourceSpecs.
    void setupResources(GrOnFlushResourceProvider*, GrCCPerFlushResources*, DoCopiesToCache);

    void onExecute(GrOpFlushState*) override;

private:
    friend class GrOpMemoryPool;

    static std::unique_ptr<GrCCDrawPathsOp> InternalMake(GrContext*, const SkIRect& clipIBounds,
                                                         const SkMatrix&, const GrShape&,
                                                         float strokeDevWidth,
                                                         const SkRect& conservativeDevBounds,
                                                         GrPaint&&);
    enum class Visibility {
        kPartial,
        kMostlyComplete, // (i.e., can we cache the whole path mask if we think it will be reused?)
        kComplete
    };

    GrCCDrawPathsOp(const SkMatrix&, const GrShape&, float strokeDevWidth,
                    const SkIRect& shapeConservativeIBounds, const SkIRect& maskDevIBounds,
                    Visibility maskVisibility, const SkRect& conservativeDevBounds, GrPaint&&);

    void recordInstance(GrTextureProxy* atlasProxy, int instanceIdx);

    const SkMatrix fViewMatrixIfUsingLocalCoords;

    struct SingleDraw {
        SingleDraw(const SkMatrix&, const GrShape&, float strokeDevWidth,
                   const SkIRect& shapeConservativeIBounds, const SkIRect& maskDevIBounds,
                   Visibility maskVisibility, const GrColor4h&);
        ~SingleDraw();

        SkMatrix fMatrix;
        GrShape fShape;
        float fStrokeDevWidth;
        const SkIRect fShapeConservativeIBounds;
        SkIRect fMaskDevIBounds;
        Visibility fMaskVisibility;
        GrColor4h fColor;

        sk_sp<GrCCPathCacheEntry> fCacheEntry;
        sk_sp<GrTextureProxy> fCachedAtlasProxy;
        SkIVector fCachedMaskShift;

        SingleDraw* fNext = nullptr;
    };

    // Declare fOwningPerOpListPaths first, before fDraws. The draws use memory allocated by
    // fOwningPerOpListPaths, so it must not be unreffed until after fDraws is destroyed.
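    // (C++ destroys non-static data members in reverse declaration order; e.g. given
    // "struct S { A fA; B fB; };", ~S() runs ~B() before ~A(). Keeping fOwningPerOpListPaths above
    // fDraws therefore guarantees the draws are torn down before their backing allocation can be
    // released.)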
116 sk_sp<GrCCPerOpListPaths> fOwningPerOpListPaths;
117
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600118 GrCCSTLList<SingleDraw> fDraws;
119 SkDEBUGCODE(int fNumDraws = 1);
120
Chris Dalton42c21152018-06-13 15:28:19 -0600121 GrProcessorSet fProcessors;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600122
Chris Dalton4da70192018-06-18 09:51:36 -0600123 struct InstanceRange {
Brian Salomon7eae3e02018-08-07 14:02:38 +0000124 GrTextureProxy* fAtlasProxy;
Chris Dalton4da70192018-06-18 09:51:36 -0600125 int fEndInstanceIdx;
126 };
127
128 SkSTArray<2, InstanceRange, true> fInstanceRanges;
129 int fBaseInstance SkDEBUGCODE(= -1);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600130};
131
132#endif