blob: 39779a70083592219bbeb6d5b2a6429f78b6482c [file] [log] [blame]
/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#ifndef GrPathRange_DEFINED
9#define GrPathRange_DEFINED
10
bsalomon6d3fe022014-07-25 08:35:45 -070011#include "GrGpuResource.h"
cdaltond1201052015-10-05 15:56:34 -070012#include "SkPath.h"
cdalton855d83f2014-09-18 13:51:53 -070013#include "SkRefCnt.h"
cdalton855d83f2014-09-18 13:51:53 -070014#include "SkTArray.h"
cdaltonb85a0aa2014-07-21 15:32:44 -070015
cdalton855d83f2014-09-18 13:51:53 -070016class SkDescriptor;
cdaltonb85a0aa2014-07-21 15:32:44 -070017
/**
 * Represents a contiguous range of GPU path objects.
 * This object is immutable with the exception that individual paths may be
 * initialized lazily.
 */
cdalton855d83f2014-09-18 13:51:53 -070023
bsalomon6d3fe022014-07-25 08:35:45 -070024class GrPathRange : public GrGpuResource {
cdaltonb85a0aa2014-07-21 15:32:44 -070025public:
halcanary9d524f22016-03-29 09:03:52 -070026
cdaltonb85a0aa2014-07-21 15:32:44 -070027
cdalton55b24af2014-11-25 11:00:56 -080028 enum PathIndexType {
29 kU8_PathIndexType, //!< uint8_t
30 kU16_PathIndexType, //!< uint16_t
31 kU32_PathIndexType, //!< uint32_t
32
33 kLast_PathIndexType = kU32_PathIndexType
34 };
35
36 static inline int PathIndexSizeInBytes(PathIndexType type) {
37 GR_STATIC_ASSERT(0 == kU8_PathIndexType);
38 GR_STATIC_ASSERT(1 == kU16_PathIndexType);
39 GR_STATIC_ASSERT(2 == kU32_PathIndexType);
40 GR_STATIC_ASSERT(kU32_PathIndexType == kLast_PathIndexType);
41
42 return 1 << type;
43 }
44
cdaltonb85a0aa2014-07-21 15:32:44 -070045 /**
cdalton55b24af2014-11-25 11:00:56 -080046 * Class that generates the paths for a specific range.
cdaltonb85a0aa2014-07-21 15:32:44 -070047 */
cdalton855d83f2014-09-18 13:51:53 -070048 class PathGenerator : public SkRefCnt {
49 public:
50 virtual int getNumPaths() = 0;
51 virtual void generatePath(int index, SkPath* out) = 0;
kkinnunen50b58e62015-05-18 23:02:07 -070052#ifdef SK_DEBUG
cdalton855d83f2014-09-18 13:51:53 -070053 virtual bool isEqualTo(const SkDescriptor&) const { return false; }
kkinnunen50b58e62015-05-18 23:02:07 -070054#endif
cdalton855d83f2014-09-18 13:51:53 -070055 virtual ~PathGenerator() {}
56 };
cdaltonb85a0aa2014-07-21 15:32:44 -070057
58 /**
cdalton855d83f2014-09-18 13:51:53 -070059 * Initialize a lazy-loaded path range. This class will generate an SkPath and call
60 * onInitPath() for each path within the range before it is drawn for the first time.
cdaltonb85a0aa2014-07-21 15:32:44 -070061 */
kkinnunen50b58e62015-05-18 23:02:07 -070062 GrPathRange(GrGpu*, PathGenerator*);
cdalton855d83f2014-09-18 13:51:53 -070063
64 /**
65 * Initialize an eager-loaded path range. The subclass is responsible for ensuring all
66 * the paths are initialized up front.
67 */
kkinnunen50b58e62015-05-18 23:02:07 -070068 GrPathRange(GrGpu*, int numPaths);
cdalton855d83f2014-09-18 13:51:53 -070069
kkinnunen50b58e62015-05-18 23:02:07 -070070 int getNumPaths() const { return fNumPaths; }
71 const PathGenerator* getPathGenerator() const { return fPathGenerator.get(); }
72
cdaltond1201052015-10-05 15:56:34 -070073 void loadPathsIfNeeded(const void* indices, PathIndexType, int count) const;
74
cdalton8585dd22015-10-08 08:04:09 -070075 template<typename IndexType> void loadPathsIfNeeded(const IndexType* indices, int count) const {
cdaltond1201052015-10-05 15:56:34 -070076 if (!fPathGenerator) {
77 return;
78 }
79
cdaltond1201052015-10-05 15:56:34 -070080 bool didLoadPaths = false;
81
82 for (int i = 0; i < count; ++i) {
cdalton8585dd22015-10-08 08:04:09 -070083 SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths));
cdaltond1201052015-10-05 15:56:34 -070084
cdalton8585dd22015-10-08 08:04:09 -070085 const int groupIndex = indices[i] / kPathsPerGroup;
cdaltond1201052015-10-05 15:56:34 -070086 const int groupByte = groupIndex / 8;
87 const uint8_t groupBit = 1 << (groupIndex % 8);
88
89 const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
90 if (!hasPath) {
91 // We track which paths are loaded in groups of kPathsPerGroup. To
92 // mark a path as loaded we need to load the entire group.
93 const int groupFirstPath = groupIndex * kPathsPerGroup;
94 const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;
95
96 SkPath path;
97 for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
98 fPathGenerator->generatePath(pathIdx, &path);
99 this->onInitPath(pathIdx, path);
100 }
101
102 fGeneratedPaths[groupByte] |= groupBit;
103 didLoadPaths = true;
104 }
105 }
106
107 if (didLoadPaths) {
108 this->didChangeGpuMemorySize();
109 }
110 }
111
kkinnunen50b58e62015-05-18 23:02:07 -0700112#ifdef SK_DEBUG
cdaltond1201052015-10-05 15:56:34 -0700113 void assertPathsLoaded(const void* indices, PathIndexType, int count) const;
114
cdalton8585dd22015-10-08 08:04:09 -0700115 template<typename IndexType> void assertPathsLoaded(const IndexType* indices, int count) const {
cdaltond1201052015-10-05 15:56:34 -0700116 if (!fPathGenerator) {
117 return;
118 }
119
cdaltond1201052015-10-05 15:56:34 -0700120 for (int i = 0; i < count; ++i) {
cdalton8585dd22015-10-08 08:04:09 -0700121 SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths));
cdaltond1201052015-10-05 15:56:34 -0700122
cdalton8585dd22015-10-08 08:04:09 -0700123 const int groupIndex = indices[i] / kPathsPerGroup;
cdaltond1201052015-10-05 15:56:34 -0700124 const int groupByte = groupIndex / 8;
125 const uint8_t groupBit = 1 << (groupIndex % 8);
126
127 SkASSERT(fGeneratedPaths[groupByte] & groupBit);
128 }
129 }
130
cdalton855d83f2014-09-18 13:51:53 -0700131 virtual bool isEqualTo(const SkDescriptor& desc) const {
halcanary96fcdcc2015-08-27 07:41:13 -0700132 return nullptr != fPathGenerator.get() && fPathGenerator->isEqualTo(desc);
cdalton855d83f2014-09-18 13:51:53 -0700133 }
kkinnunen50b58e62015-05-18 23:02:07 -0700134#endif
cdaltonb85a0aa2014-07-21 15:32:44 -0700135protected:
cdalton855d83f2014-09-18 13:51:53 -0700136 // Initialize a path in the range before drawing. This is only called when
137 // fPathGenerator is non-null. The child class need not call didChangeGpuMemorySize(),
138 // GrPathRange will take care of that after the call is complete.
139 virtual void onInitPath(int index, const SkPath&) const = 0;
cdaltonb85a0aa2014-07-21 15:32:44 -0700140
141private:
cdaltond1201052015-10-05 15:56:34 -0700142 enum {
143 kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading.
144 };
cdalton855d83f2014-09-18 13:51:53 -0700145
Hal Canary144caf52016-11-07 17:57:18 -0500146 mutable sk_sp<PathGenerator> fPathGenerator;
cdalton855d83f2014-09-18 13:51:53 -0700147 mutable SkTArray<uint8_t, true /*MEM_COPY*/> fGeneratedPaths;
148 const int fNumPaths;
cdalton855d83f2014-09-18 13:51:53 -0700149
bsalomon6d3fe022014-07-25 08:35:45 -0700150 typedef GrGpuResource INHERITED;
cdaltonb85a0aa2014-07-21 15:32:44 -0700151};
152
153#endif