/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrAtlasPathRenderer_DEFINED
#define GrAtlasPathRenderer_DEFINED

#include "include/gpu/GrTypes.h"

#if SK_GPU_V1

#include "include/private/SkTHash.h"
#include "src/core/SkIPoint16.h"
#include "src/gpu/GrDynamicAtlas.h"
#include "src/gpu/GrFragmentProcessor.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrPathRenderer.h"

class GrAtlasRenderTask;
class GrOp;
class GrRecordingContext;

// Draws paths by first rendering their coverage mask into an offscreen atlas.
class GrAtlasPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    static bool IsSupported(GrRecordingContext*);

    // Returns a GrAtlasPathRenderer if it is supported, otherwise null.
    static sk_sp<GrAtlasPathRenderer> Make(GrRecordingContext* rContext);

    const char* name() const final { return "GrAtlasPathRenderer"; }

    StencilSupport onGetStencilSupport(const GrStyledShape&) const override {
        return kNoSupport_StencilSupport;
    }

    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;

    bool onDrawPath(const DrawPathArgs&) override;

    // Returns a fragment processor that modulates inputFP by the given deviceSpacePath's coverage,
    // implemented using an internal atlas.
    //
    // Returns 'inputFP' wrapped in GrFPFailure() if the path was too large, or if the current
    // atlas is full and already used by either opBeingClipped or inputFP. (Currently, "too large"
    // means larger than fAtlasMaxSize in either dimension, more than 256^2 total pixels, or more
    // than 128^2 total pixels if the surfaceDrawContext supports MSAA or DMSAA.)
    //
    // Also returns GrFPFailure() if the view matrix has perspective.
    GrFPResult makeAtlasClipEffect(const GrSurfaceDrawContext*,
                                   const GrOp* opBeingClipped,
                                   std::unique_ptr<GrFragmentProcessor> inputFP,
                                   const SkIRect& drawBounds,
                                   const SkMatrix&,
                                   const SkPath&);
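
    // A minimal call-site sketch (illustrative only; 'atlasPathRenderer', 'sdc', 'op', 'inputFP',
    // 'drawBounds', 'viewMatrix', and 'clipPath' are placeholders, and this assumes GrFPResult
    // unpacks into a success flag plus the resulting fragment processor):
    //
    //   auto [success, fp] = atlasPathRenderer->makeAtlasClipEffect(sdc, op, std::move(inputFP),
    //                                                               drawBounds, viewMatrix,
    //                                                               clipPath);
    //   if (!success) {
    //       // Path too large, perspective view matrix, or atlas full: clip some other way
    //       // (e.g., a stencil clip), using the returned 'fp' as the untouched input.
    //   }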

private:
    // The atlas is not compatible with DDL. We can only use it on direct contexts.
    GrAtlasPathRenderer(GrDirectContext*);

    // Returns true if the given device-space path bounds are small enough to fit in an atlas and
    // to benefit from atlasing. (Currently, "small enough" means no larger than fAtlasMaxSize in
    // either dimension, and no more than 256^2 total pixels, or 128^2 total pixels if the
    // fallbackAAType is kMSAA.)
    bool pathFitsInAtlas(const SkRect& pathDevBounds, GrAAType fallbackAAType) const;
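
    // Illustrative sketch of the criteria described above (assumed logic, not necessarily the
    // exact implementation):
    //
    //   float w = pathDevBounds.width();
    //   float h = pathDevBounds.height();
    //   float maxArea = (fallbackAAType == GrAAType::kMSAA) ? 128 * 128 : 256 * 256;
    //   bool fits = (w <= fAtlasMaxSize) && (h <= fAtlasMaxSize) && (w * h <= maxArea);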

    // Returns true if the draw being set up already uses the given atlasProxy.
    using DrawRefsAtlasCallback = std::function<bool(const GrSurfaceProxy* atlasProxy)>;

    // Adds the filled path to an atlas.
    //
    // pathFitsInAtlas() and is_visible() both must have returned true before making this call.
    //
    // Fails and returns false if the current atlas is full and already in use according to
    // DrawRefsAtlasCallback.
    bool addPathToAtlas(GrRecordingContext*,
                        const SkMatrix&,
                        const SkPath&,
                        const SkRect& pathDevBounds,
                        SkIRect* devIBounds,
                        SkIPoint16* locationInAtlas,
                        bool* transposedInAtlas,
                        const DrawRefsAtlasCallback&);
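
    // Hedged call-site sketch ('rContext', 'viewMatrix', 'path', and 'pathDevBounds' are
    // placeholders, and the fallback comment is only one possible response):
    //
    //   SkIRect devIBounds;
    //   SkIPoint16 locationInAtlas;
    //   bool transposedInAtlas;
    //   auto drawRefsAtlas = [&](const GrSurfaceProxy* atlasProxy) {
    //       return false;  // e.g., the draw being set up does not reference any atlas yet.
    //   };
    //   if (!this->addPathToAtlas(rContext, viewMatrix, path, pathDevBounds, &devIBounds,
    //                             &locationInAtlas, &transposedInAtlas, drawRefsAtlas)) {
    //       // Current atlas is full and already referenced by this draw; fall back to another
    //       // path renderer.
    //   }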

    // Instantiates texture(s) for all atlases we've created since the last flush. Atlases that
    // are the same size will be instantiated with the same backing texture.
    void preFlush(GrOnFlushResourceProvider*, SkSpan<const uint32_t> taskIDs) override;

    float fAtlasMaxSize = 0;
    int fAtlasInitialSize = 0;

    // A collection of all atlases we've created and used since the last flush. We instantiate
    // these at flush time during preFlush().
    SkSTArray<4, sk_sp<GrAtlasRenderTask>> fAtlasRenderTasks;

    // This simple cache remembers the locations of cacheable path masks in the most recent atlas.
    // Its main motivation is for clip paths.
    struct AtlasPathKey {
        void set(const SkMatrix&, const SkPath&);
        bool operator==(const AtlasPathKey& k) const {
            static_assert(sizeof(*this) == sizeof(uint32_t) * 6);
            return !memcmp(this, &k, sizeof(*this));
        }
        uint32_t fPathGenID;
        float fAffineMatrix[4];
        uint8_t fSubpixelPositionKey[2];
        uint16_t fFillRule;
    };
    SkTHashMap<AtlasPathKey, SkIPoint16> fAtlasPathCache;
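
    // Rough sketch of the intended cache flow (assumes SkTHashMap::find() returns a pointer to
    // the stored value and set() inserts or overwrites; 'viewMatrix', 'path', and
    // 'locationInAtlas' are placeholders):
    //
    //   AtlasPathKey key;
    //   key.set(viewMatrix, path);
    //   if (const SkIPoint16* existingLocation = fAtlasPathCache.find(key)) {
    //       // Reuse the mask already rendered at *existingLocation in the most recent atlas.
    //   } else {
    //       // Render a new mask and remember where it landed.
    //       fAtlasPathCache.set(key, locationInAtlas);
    //   }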
};

#else // SK_GPU_V1

class GrAtlasPathRenderer {
public:
    static bool IsSupported(GrRecordingContext*) { return false; }
};

#endif // SK_GPU_V1

#endif // GrAtlasPathRenderer_DEFINED