/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
#ifndef GrCCDrawPathsOp_DEFINED
#define GrCCDrawPathsOp_DEFINED

#include "src/core/SkTInternalLList.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrCCSTLList.h"
#include "src/gpu/geometry/GrShape.h"
#include "src/gpu/ops/GrDrawOp.h"

#include <memory>

class GrCCAtlas;
class GrCCPerFlushResources;
struct GrCCPerFlushResourceSpecs;
struct GrCCPerOpsTaskPaths;
class GrOnFlushResourceProvider;
class GrRecordingContext;

24/**
25 * This is the Op that draws paths to the actual canvas, using atlases generated by CCPR.
26 */
27class GrCCDrawPathsOp : public GrDrawOp {
28public:
29 DEFINE_OP_CLASS_ID
30 SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCDrawPathsOp);
31
Robert Phillips6f0e02f2019-02-13 11:02:28 -050032 static std::unique_ptr<GrCCDrawPathsOp> Make(GrRecordingContext*, const SkIRect& clipIBounds,
Chris Dalton09a7bb22018-08-31 19:53:15 +080033 const SkMatrix&, const GrShape&, GrPaint&&);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060034 ~GrCCDrawPathsOp() override;
35
Chris Dalton4da70192018-06-18 09:51:36 -060036 const char* name() const override { return "GrCCDrawPathsOp"; }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060037 FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
Chris Dalton6ce447a2019-06-23 18:07:38 -060038 GrProcessorSet::Analysis finalize(const GrCaps&, const GrAppliedClip*,
39 bool hasMixedSampledCoverage, GrClampType) override;
Michael Ludwig28b0c5d2019-12-19 14:51:00 -050040 CombineResult onCombineIfPossible(GrOp*, GrRecordingContext::Arenas*, const GrCaps&) override;
Chris Dalton1706cbf2019-05-21 19:35:29 -060041 void visitProxies(const VisitProxyFunc& fn) const override {
Chris Dalton45f6b3d2019-05-21 12:06:03 -060042 for (const auto& range : fInstanceRanges) {
Chris Dalton7eb5c0f2019-05-23 15:15:47 -060043 fn(range.fAtlasProxy, GrMipMapped::kNo);
Chris Dalton45f6b3d2019-05-21 12:06:03 -060044 }
Brian Salomon7d94bb52018-10-12 14:37:19 -040045 fProcessors.visitProxies(fn);
46 }
Greg Danielb20d7e52019-09-03 13:54:39 -040047 void onPrepare(GrOpFlushState*) override;
Chris Dalton5ba36ba2018-05-09 01:08:38 -060048
Greg Danielf41b2bd2019-08-22 16:19:24 -040049 void addToOwningPerOpsTaskPaths(sk_sp<GrCCPerOpsTaskPaths> owningPerOpsTaskPaths);
Chris Dalton4da70192018-06-18 09:51:36 -060050
51 // Makes decisions about how to draw each path (cached, copied, rendered, etc.), and
Chris Dalton351e80c2019-01-06 22:51:00 -070052 // increments/fills out the corresponding GrCCPerFlushResourceSpecs.
53 void accountForOwnPaths(GrCCPathCache*, GrOnFlushResourceProvider*, GrCCPerFlushResourceSpecs*);
Chris Dalton4da70192018-06-18 09:51:36 -060054
Chris Dalton351e80c2019-01-06 22:51:00 -070055 // Allows the caller to decide whether to actually do the suggested copies from cached 16-bit
56 // coverage count atlases, and into 8-bit literal coverage atlases. Purely to save space.
57 enum class DoCopiesToA8Coverage : bool {
Chris Dalton4da70192018-06-18 09:51:36 -060058 kNo = false,
59 kYes = true
60 };
61
62 // Allocates the GPU resources indicated by accountForOwnPaths(), in preparation for drawing. If
Chris Dalton351e80c2019-01-06 22:51:00 -070063 // DoCopiesToA8Coverage is kNo, the paths slated for copy will instead be left in their 16-bit
64 // coverage count atlases.
Chris Dalton4da70192018-06-18 09:51:36 -060065 //
Chris Dalton351e80c2019-01-06 22:51:00 -070066 // NOTE: If using DoCopiesToA8Coverage::kNo, it is the caller's responsibility to have called
67 // cancelCopies() on the GrCCPerFlushResourceSpecs, prior to making this call.
68 void setupResources(GrCCPathCache*, GrOnFlushResourceProvider*, GrCCPerFlushResources*,
69 DoCopiesToA8Coverage);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060070
Brian Salomon588cec72018-11-14 13:56:37 -050071 void onExecute(GrOpFlushState*, const SkRect& chainBounds) override;
Chris Dalton5ba36ba2018-05-09 01:08:38 -060072
73private:
Robert Phillipsc655c3a2020-03-18 13:23:45 -040074 void onPrePrepare(GrRecordingContext*,
Brian Salomon8afde5f2020-04-01 16:22:00 -040075 const GrSurfaceProxyView* writeView,
Robert Phillipsc655c3a2020-03-18 13:23:45 -040076 GrAppliedClip*,
77 const GrXferProcessor::DstProxyView&) override {}
78
Robert Phillips7c525e62018-06-12 10:11:12 -040079 friend class GrOpMemoryPool;
80
Robert Phillips6f0e02f2019-02-13 11:02:28 -050081 static std::unique_ptr<GrCCDrawPathsOp> InternalMake(GrRecordingContext*,
82 const SkIRect& clipIBounds,
Chris Dalton09a7bb22018-08-31 19:53:15 +080083 const SkMatrix&, const GrShape&,
84 float strokeDevWidth,
85 const SkRect& conservativeDevBounds,
86 GrPaint&&);
Chris Daltona8429cf2018-06-22 11:43:31 -060087
Chris Dalton09a7bb22018-08-31 19:53:15 +080088 GrCCDrawPathsOp(const SkMatrix&, const GrShape&, float strokeDevWidth,
89 const SkIRect& shapeConservativeIBounds, const SkIRect& maskDevIBounds,
Chris Daltonaaa77c12019-01-07 17:45:36 -070090 const SkRect& conservativeDevBounds, GrPaint&&);
Robert Phillips88a32ef2018-06-07 11:05:56 -040091
Chris Dalton6a5317a2019-07-12 09:55:52 -060092 void recordInstance(
93 GrCCPathProcessor::CoverageMode, GrTextureProxy* atlasProxy, int instanceIdx);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060094
Chris Dalton1c548942018-05-22 13:09:48 -060095 const SkMatrix fViewMatrixIfUsingLocalCoords;
Chris Dalton4bfb50b2018-05-21 09:10:53 -060096
Chris Daltona13078c2019-01-07 09:34:05 -070097 class SingleDraw {
98 public:
Chris Dalton09a7bb22018-08-31 19:53:15 +080099 SingleDraw(const SkMatrix&, const GrShape&, float strokeDevWidth,
100 const SkIRect& shapeConservativeIBounds, const SkIRect& maskDevIBounds,
Chris Daltonaaa77c12019-01-07 17:45:36 -0700101 const SkPMColor4f&);
Chris Dalton4da70192018-06-18 09:51:36 -0600102
Chris Daltona13078c2019-01-07 09:34:05 -0700103 // See the corresponding methods in GrCCDrawPathsOp.
Chris Daltonb8fff0d2019-03-05 10:11:58 -0700104 GrProcessorSet::Analysis finalize(
Chris Dalton6ce447a2019-06-23 18:07:38 -0600105 const GrCaps&, const GrAppliedClip*, bool hasMixedSampledCoverage, GrClampType,
106 GrProcessorSet*);
Chris Daltona13078c2019-01-07 09:34:05 -0700107 void accountForOwnPath(GrCCPathCache*, GrOnFlushResourceProvider*,
108 GrCCPerFlushResourceSpecs*);
109 void setupResources(GrCCPathCache*, GrOnFlushResourceProvider*, GrCCPerFlushResources*,
110 DoCopiesToA8Coverage, GrCCDrawPathsOp*);
111
112 private:
Chris Daltonaaa77c12019-01-07 17:45:36 -0700113 bool shouldCachePathMask(int maxRenderTargetSize) const;
114
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600115 SkMatrix fMatrix;
Chris Dalton09a7bb22018-08-31 19:53:15 +0800116 GrShape fShape;
117 float fStrokeDevWidth;
118 const SkIRect fShapeConservativeIBounds;
Chris Daltona8429cf2018-06-22 11:43:31 -0600119 SkIRect fMaskDevIBounds;
Brian Osmancf860852018-10-31 14:04:39 -0400120 SkPMColor4f fColor;
Chris Dalton4da70192018-06-18 09:51:36 -0600121
Chris Dalton351e80c2019-01-06 22:51:00 -0700122 GrCCPathCache::OnFlushEntryRef fCacheEntry;
Chris Dalton4da70192018-06-18 09:51:36 -0600123 SkIVector fCachedMaskShift;
Chris Dalton351e80c2019-01-06 22:51:00 -0700124 bool fDoCopyToA8Coverage = false;
Chris Daltonaaa77c12019-01-07 17:45:36 -0700125 bool fDoCachePathMask = false;
Chris Dalton9bc450b2019-07-21 19:34:52 -0600126 SkDEBUGCODE(bool fWasCountedAsRender = false);
Chris Dalton4da70192018-06-18 09:51:36 -0600127
Chris Dalton644341a2018-06-18 19:14:16 -0600128 SingleDraw* fNext = nullptr;
Chris Daltona13078c2019-01-07 09:34:05 -0700129
130 friend class GrCCSTLList<SingleDraw>; // To access fNext.
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600131 };
132
Greg Danielf41b2bd2019-08-22 16:19:24 -0400133 // Declare fOwningPerOpsTaskPaths first, before fDraws. The draws use memory allocated by
134 // fOwningPerOpsTaskPaths, so it must not be unreffed until after fDraws is destroyed.
135 sk_sp<GrCCPerOpsTaskPaths> fOwningPerOpsTaskPaths;
Chris Daltondedf8f22018-09-24 20:23:47 -0600136
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600137 GrCCSTLList<SingleDraw> fDraws;
138 SkDEBUGCODE(int fNumDraws = 1);
139
Chris Dalton42c21152018-06-13 15:28:19 -0600140 GrProcessorSet fProcessors;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600141
Chris Dalton4da70192018-06-18 09:51:36 -0600142 struct InstanceRange {
Chris Dalton6a5317a2019-07-12 09:55:52 -0600143 GrCCPathProcessor::CoverageMode fCoverageMode;
Brian Salomon7eae3e02018-08-07 14:02:38 +0000144 GrTextureProxy* fAtlasProxy;
Chris Dalton4da70192018-06-18 09:51:36 -0600145 int fEndInstanceIdx;
146 };
147
148 SkSTArray<2, InstanceRange, true> fInstanceRanges;
149 int fBaseInstance SkDEBUGCODE(= -1);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600150};
151
152#endif