/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCClipPath_DEFINED
#define GrCCClipPath_DEFINED

#include "include/core/SkPath.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/ccpr/GrCCAtlas.h"

struct GrCCPerFlushResourceSpecs;
class GrCCPerFlushResources;
class GrOnFlushResourceProvider;
class GrProxyProvider;

/**
 * These are keyed by SkPath generation ID, and store which device-space paths are accessed and
 * where by clip FPs in a given opsTask. A single GrCCClipPath can be referenced by multiple FPs. At
 * flush time their coverage count masks are packed into atlas(es) alongside normal DrawPathOps.
 */
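// Rough lifecycle sketch implied by the API below. The call sites and local names
// (deviceSpacePath, accessRect, atlasCoverageType, caps, specs, resources, onFlushRP) are
// illustrative assumptions, not something this header prescribes:
//
//     GrCCClipPath clipPath;
//     clipPath.init(deviceSpacePath, accessRect, atlasCoverageType, caps);  // first clip FP
//     clipPath.addAccess(anotherAccessRect);               // each later FP that reuses the path
//     clipPath.accountForOwnPath(&specs);                  // when sizing per-flush resources
//     clipPath.renderPathInAtlas(&resources, onFlushRP);   // when packing atlases at flush time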
class GrCCClipPath {
public:
    GrCCClipPath() = default;
    GrCCClipPath(const GrCCClipPath&) = delete;

    ~GrCCClipPath() {
        // Ensure no clip FP exists with a dangling pointer back into this class. This works because
        // a clip FP will have a ref on the proxy if it exists.
        //
        // This assert also guarantees there won't be a lazy proxy callback with a dangling pointer
        // back into this class, since no proxy will exist after we destruct, if the assert passes.
        SkASSERT(!fAtlasLazyProxy || fAtlasLazyProxy->unique());
    }

    bool isInitialized() const { return fAtlasLazyProxy != nullptr; }
    void init(const SkPath& deviceSpacePath, const SkIRect& accessRect,
              GrCCAtlas::CoverageType atlasCoverageType, const GrCaps&);

    void addAccess(const SkIRect& accessRect) {
        SkASSERT(this->isInitialized());
        fAccessRect.join(accessRect);
    }
    GrTextureProxy* atlasLazyProxy() const {
        SkASSERT(this->isInitialized());
        return fAtlasLazyProxy.get();
    }
    const SkPath& deviceSpacePath() const {
        SkASSERT(this->isInitialized());
        return fDeviceSpacePath;
    }
    const SkIRect& pathDevIBounds() const {
        SkASSERT(this->isInitialized());
        return fPathDevIBounds;
    }

    void accountForOwnPath(GrCCPerFlushResourceSpecs*) const;
    void renderPathInAtlas(GrCCPerFlushResources*, GrOnFlushResourceProvider*);

    const SkVector& atlasScale() const { SkASSERT(fHasAtlasTransform); return fAtlasScale; }
    const SkVector& atlasTranslate() const { SkASSERT(fHasAtlasTransform); return fAtlasTranslate; }
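    // (Presumably a clip FP uses these accessors to map device-space coordinates into the atlas,
    // roughly "atlasCoord = devCoord * atlasScale() + atlasTranslate()"; that mapping is an
    // inference from the accessors above, not something spelled out in this header.)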

private:
    sk_sp<GrTextureProxy> fAtlasLazyProxy;
    SkPath fDeviceSpacePath;
    SkIRect fPathDevIBounds;
    SkIRect fAccessRect;

    const GrCCAtlas* fAtlas = nullptr;
    SkIVector fDevToAtlasOffset;  // Translation from device space to location in atlas.
    SkDEBUGCODE(bool fHasAtlas = false;)

    SkVector fAtlasScale;
    SkVector fAtlasTranslate;
    SkDEBUGCODE(bool fHasAtlasTransform = false;)
};

#endif