/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
| 7 | |
| 8 | #ifndef GrAtlasPathRenderer_DEFINED |
| 9 | #define GrAtlasPathRenderer_DEFINED |
| 10 | |
Robert Phillips | 400f52e | 2021-07-26 13:23:10 -0400 | [diff] [blame^] | 11 | #include "include/gpu/GrTypes.h" |
| 12 | |
| 13 | #if SK_GPU_V1 |
| 14 | |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 15 | #include "include/private/SkTHash.h" |
| 16 | #include "src/core/SkIPoint16.h" |
| 17 | #include "src/gpu/GrDynamicAtlas.h" |
| 18 | #include "src/gpu/GrFragmentProcessor.h" |
| 19 | #include "src/gpu/GrOnFlushResourceProvider.h" |
| 20 | #include "src/gpu/GrPathRenderer.h" |
| 21 | |
| 22 | class GrAtlasRenderTask; |
| 23 | class GrOp; |
| 24 | class GrRecordingContext; |
| 25 | |
// Draws paths by first rendering their coverage mask into an offscreen atlas.
//
// Also a GrOnFlushCallbackObject: it registers for the preFlush() callback so the
// atlas texture(s) accumulated since the last flush can be instantiated just
// before the flush that samples them.
class GrAtlasPathRenderer : public GrPathRenderer, public GrOnFlushCallbackObject {
public:
    // Reports whether atlas path rendering can be used on the given context.
    static bool IsSupported(GrRecordingContext*);

    // Returns a GrAtlasPathRenderer if it is supported, otherwise null.
    static sk_sp<GrAtlasPathRenderer> Make(GrRecordingContext* rContext);

    const char* name() const final { return "GrAtlasPathRenderer"; }

    // Atlas masks provide coverage only; this renderer cannot service stencil draws.
    StencilSupport onGetStencilSupport(const GrStyledShape&) const override {
        return kNoSupport_StencilSupport;
    }

    CanDrawPath onCanDrawPath(const CanDrawPathArgs&) const override;

    bool onDrawPath(const DrawPathArgs&) override;

    // Returns a fragment processor that modulates inputFP by the given deviceSpacePath's coverage,
    // implemented using an internal atlas.
    //
    // Returns 'inputFP' wrapped in GrFPFailure() if the path was too large, or if the current atlas
    // is full and already used by either opBeingClipped or inputFP. (Currently, "too large" means
    // more than 128*128 total pixels, or larger than the atlas size in either dimension.)
    //
    // Also returns GrFPFailure() if the view matrix has perspective.
    GrFPResult makeAtlasClipEffect(GrRecordingContext*,
                                   const GrOp* opBeingClipped,
                                   std::unique_ptr<GrFragmentProcessor> inputFP,
                                   const SkIRect& drawBounds,
                                   const SkMatrix&,
                                   const SkPath&);

private:
    // The atlas is not compatible with DDL. We can only use it on direct contexts.
    GrAtlasPathRenderer(GrDirectContext*);

    // Returns true if the given device-space path bounds are no larger than 128*128 total pixels
    // and no larger than the max atlas size in either dimension.
    bool pathFitsInAtlas(const SkRect& pathDevBounds) const;

    // Returns true if the draw being set up already uses the given atlasProxy.
    using DrawRefsAtlasCallback = std::function<bool(const GrSurfaceProxy* atlasProxy)>;

    // Adds the filled path to an atlas.
    //
    // pathFitsInAtlas() and is_visible() both must have returned true before making this call.
    //
    // Fails and returns false if the current atlas is full and already in use according to
    // DrawRefsAtlasCallback.
    //
    // On success, writes the path's integer device bounds, its location within the atlas, and
    // whether it was stored transposed, through the three out-params.
    bool addPathToAtlas(GrRecordingContext*,
                        const SkMatrix&,
                        const SkPath&,
                        const SkRect& pathDevBounds,
                        SkIRect* devIBounds,
                        SkIPoint16* locationInAtlas,
                        bool* transposedInAtlas,
                        const DrawRefsAtlasCallback&);

    // Instantiates texture(s) for all atlases we've created since the last flush. Atlases that are
    // the same size will be instantiated with the same backing texture.
    void preFlush(GrOnFlushResourceProvider*, SkSpan<const uint32_t> taskIDs) override;

    float fAtlasMaxSize = 0;
    int fAtlasInitialSize = 0;

    // A collection of all atlases we've created and used since the last flush. We instantiate these
    // at flush time during preFlush().
    SkSTArray<4, sk_sp<GrAtlasRenderTask>> fAtlasRenderTasks;

    // This simple cache remembers the locations of cacheable path masks in the most recent atlas.
    // Its main motivation is for clip paths.
    struct AtlasPathKey {
        void set(const SkMatrix&, const SkPath&);
        bool operator==(const AtlasPathKey& k) const {
            // The members pack tightly into 6 * 4 bytes with no padding (the
            // static_assert guards this), so raw memcmp is a valid equality test.
            static_assert(sizeof(*this) == sizeof(uint32_t) * 6);
            return !memcmp(this, &k, sizeof(*this));
        }
        uint32_t fPathGenID;
        float fAffineMatrix[4];
        uint8_t fSubpixelPositionKey[2];
        uint16_t fFillRule;
    };
    SkTHashMap<AtlasPathKey, SkIPoint16> fAtlasPathCache;
};
| 111 | |
Robert Phillips | 400f52e | 2021-07-26 13:23:10 -0400 | [diff] [blame^] | 112 | #else // SK_GPU_V1 |
| 113 | |
// Fix: GrRecordingContext is forward-declared only inside the SK_GPU_V1 branch
// above, so this #else branch needs its own forward declaration for the
// parameter type below to be in scope.
class GrRecordingContext;

// No-op stub compiled when the v1 GPU backend is disabled (SK_GPU_V1 == 0).
// Keeps call sites well-formed: atlas path rendering is simply never supported.
class GrAtlasPathRenderer {
public:
    // Always false: the atlas path renderer requires the v1 backend.
    static bool IsSupported(GrRecordingContext*) { return false; }
};
| 118 | |
| 119 | #endif // SK_GPU_V1 |
| 120 | |
| 121 | #endif // GrAtlasPathRenderer_DEFINED |