/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef AtlasPathRenderer_DEFINED
#define AtlasPathRenderer_DEFINED

#include "include/gpu/GrTypes.h"
#include "include/private/SkTHash.h"
#include "src/core/SkIPoint16.h"
#include "src/gpu/GrDynamicAtlas.h"
#include "src/gpu/GrFragmentProcessor.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/v1/PathRenderer.h"

class GrOp;
class GrRecordingContext;

namespace skgpu::v1 {

class AtlasRenderTask;

// Draws paths by first rendering their coverage mask into an offscreen atlas.
class AtlasPathRenderer final : public PathRenderer, public GrOnFlushCallbackObject {
public:
    static bool IsSupported(GrRecordingContext*);

    // Returns an AtlasPathRenderer if atlasing is supported, otherwise null.
    static sk_sp<AtlasPathRenderer> Make(GrRecordingContext* rContext);
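
    // A minimal creation sketch (illustrative only; 'rContext' is an assumed GrRecordingContext
    // backed by a direct context, since the atlas is not DDL-compatible):
    //
    //     sk_sp<AtlasPathRenderer> atlasPathRenderer = AtlasPathRenderer::Make(rContext);
    //     if (!atlasPathRenderer) {
    //         // Atlasing is unsupported on this context; fall back to other path renderers.
    //     }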

    const char* name() const override { return "GrAtlasPathRenderer"; }

    // Returns a fragment processor that modulates inputFP by the given deviceSpacePath's coverage,
    // implemented using an internal atlas.
    //
    // Returns 'inputFP' wrapped in GrFPFailure() if the path was too large, or if the current atlas
    // is full and already used by either opBeingClipped or inputFP. (Currently, "too large" means
    // larger than fAtlasMaxSize in either dimension, more than 256^2 total pixels, or more than
    // 128^2 total pixels if the surfaceDrawContext supports MSAA or DMSAA.)
    //
    // Also returns GrFPFailure() if the view matrix has perspective.
    GrFPResult makeAtlasClipEffect(const skgpu::v1::SurfaceDrawContext*,
                                   const GrOp* opBeingClipped,
                                   std::unique_ptr<GrFragmentProcessor> inputFP,
                                   const SkIRect& drawBounds,
                                   const SkMatrix&,
                                   const SkPath&);
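
    // Example consumption of the GrFPResult (a sketch only; 'atlasPathRenderer', 'sdc', 'op',
    // 'clipFP', 'maskDeviceBounds', 'viewMatrix', and 'clipPath' are assumed to exist in the
    // caller):
    //
    //     auto [success, fp] = atlasPathRenderer->makeAtlasClipEffect(
    //             sdc, op, std::move(clipFP), maskDeviceBounds, viewMatrix, clipPath);
    //     if (!success) {
    //         // Path too large or atlas exhausted: fall back to another clipping strategy.
    //         // 'fp' still holds the original input FP here.
    //     }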

private:
    // The atlas is not compatible with DDL. We can only use it on direct contexts.
    AtlasPathRenderer(GrDirectContext*);

    StencilSupport onGetStencilSupport(const GrStyledShape&) const override {
        return kNoSupport_StencilSupport;
    }

    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;

    bool onDrawPath(const DrawPathArgs&) override;

    // Returns true if the given device-space path bounds are small enough to fit in an atlas and to
    // benefit from atlasing. (Currently, "small enough" means no larger than fAtlasMaxSize in
    // either dimension, no more than 256^2 total pixels, or no more than 128^2 total pixels if the
    // fallbackAAType is kMSAA.)
    bool pathFitsInAtlas(const SkRect& pathDevBounds, GrAAType fallbackAAType) const;
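
    // Rough shape of the check described above (illustrative only; the real implementation may
    // differ, e.g. it can also take fAtlasMaxPathWidth into account):
    //
    //     float w = pathDevBounds.width(), h = pathDevBounds.height();
    //     float maxPixels = (fallbackAAType == GrAAType::kMSAA) ? 128*128 : 256*256;
    //     bool fits = w <= fAtlasMaxSize && h <= fAtlasMaxSize && w*h <= maxPixels;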

    // Returns true if the draw being set up already uses the given atlasProxy.
    using DrawRefsAtlasCallback = std::function<bool(const GrSurfaceProxy* atlasProxy)>;

    // Adds the filled path to an atlas.
    //
    // pathFitsInAtlas() and is_visible() both must have returned true before making this call.
    //
    // Fails and returns false if the current atlas is full and already in use according to
    // DrawRefsAtlasCallback.
    bool addPathToAtlas(GrRecordingContext*,
                        const SkMatrix&,
                        const SkPath&,
                        const SkRect& pathDevBounds,
                        SkIRect* devIBounds,
                        SkIPoint16* locationInAtlas,
                        bool* transposedInAtlas,
                        const DrawRefsAtlasCallback&);
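
    // Example call shape (a sketch; every name other than the members declared above is assumed
    // to exist in the caller):
    //
    //     SkIRect devIBounds;
    //     SkIPoint16 locationInAtlas;
    //     bool transposedInAtlas;
    //     auto drawRefsAtlas = [&](const GrSurfaceProxy* atlasProxy) {
    //         // Return true if the draw being assembled already reads 'atlasProxy'.
    //         return drawProxies.contains(atlasProxy);
    //     };
    //     if (!this->addPathToAtlas(rContext, viewMatrix, path, pathDevBounds, &devIBounds,
    //                               &locationInAtlas, &transposedInAtlas, drawRefsAtlas)) {
    //         // Atlas full and already referenced by this draw; handle the path another way.
    //     }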

    // Instantiates texture(s) for all atlases we've created since the last flush. Atlases that are
    // the same size will be instantiated with the same backing texture.
    void preFlush(GrOnFlushResourceProvider*, SkSpan<const uint32_t> taskIDs) override;

    float fAtlasMaxSize = 0;
    float fAtlasMaxPathWidth = 0;
    int fAtlasInitialSize = 0;

    // A collection of all atlases we've created and used since the last flush. We instantiate these
    // at flush time during preFlush().
    SkSTArray<4, sk_sp<AtlasRenderTask>> fAtlasRenderTasks;

    // This simple cache remembers the locations of cacheable path masks in the most recent atlas.
    // Its main motivation is for clip paths.
    struct AtlasPathKey {
        void set(const SkMatrix&, const SkPath&);
        bool operator==(const AtlasPathKey& k) const {
            static_assert(sizeof(*this) == sizeof(uint32_t) * 8);
            return !memcmp(this, &k, sizeof(*this));
        }
        uint32_t fPathGenID;
        float fAffineMatrix[6];
        uint32_t fFillRule;
    };
    SkTHashMap<AtlasPathKey, SkIPoint16> fAtlasPathCache;
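
    // Illustrative lookup pattern for the cache above (a sketch; the key stores an affine matrix,
    // so this assumes 'viewMatrix' has no perspective, and 'newLocation' stands in for the
    // location returned by a successful atlas add):
    //
    //     AtlasPathKey key;
    //     key.set(viewMatrix, path);
    //     if (const SkIPoint16* location = fAtlasPathCache.find(key)) {
    //         // Reuse the mask already placed at *location in the most recent atlas.
    //     } else {
    //         fAtlasPathCache.set(key, newLocation);
    //     }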
};

} // namespace skgpu::v1

#endif // AtlasPathRenderer_DEFINED