/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrPathRange_DEFINED
#define GrPathRange_DEFINED

#include "GrGpuResource.h"
#include "SkPath.h"
#include "SkRefCnt.h"
#include "SkTArray.h"

class SkDescriptor;

/**
 * Represents a contiguous range of GPU path objects.
 * This object is immutable with the exception that individual paths may be
 * initialized lazily.
 */
class GrPathRange : public GrGpuResource {
public:

    enum PathIndexType {
        kU8_PathIndexType,   //!< uint8_t
        kU16_PathIndexType,  //!< uint16_t
        kU32_PathIndexType,  //!< uint32_t

        kLast_PathIndexType = kU32_PathIndexType
    };

    static inline int PathIndexSizeInBytes(PathIndexType type) {
        GR_STATIC_ASSERT(0 == kU8_PathIndexType);
        GR_STATIC_ASSERT(1 == kU16_PathIndexType);
        GR_STATIC_ASSERT(2 == kU32_PathIndexType);
        GR_STATIC_ASSERT(kU32_PathIndexType == kLast_PathIndexType);

        return 1 << type;
    }
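    // For illustration: the index type maps directly to its size in bytes, e.g.
    //   PathIndexSizeInBytes(kU8_PathIndexType)  == 1
    //   PathIndexSizeInBytes(kU16_PathIndexType) == 2
    //   PathIndexSizeInBytes(kU32_PathIndexType) == 4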

    /**
     * Class that generates the paths for a specific range.
     */
    class PathGenerator : public SkRefCnt {
    public:
        virtual int getNumPaths() = 0;
        virtual void generatePath(int index, SkPath* out) = 0;
#ifdef SK_DEBUG
        virtual bool isEqualTo(const SkDescriptor&) const { return false; }
#endif
        virtual ~PathGenerator() {}
    };
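    // A minimal sketch of a PathGenerator subclass (hypothetical, for illustration
    // only; real generators typically build glyph outlines from font data):
    //
    //   class CircleGenerator : public GrPathRange::PathGenerator {
    //   public:
    //       int getNumPaths() override { return 16; }
    //       void generatePath(int index, SkPath* out) override {
    //           out->reset();
    //           out->addCircle(0, 0, SkIntToScalar(index + 1));
    //       }
    //   };
    //
    // Passing such a generator to the lazy-loading constructor below defers path
    // creation until the paths are first drawn.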

    /**
     * Initialize a lazy-loaded path range. This class will generate an SkPath and call
     * onInitPath() for each path within the range before it is drawn for the first time.
     */
    GrPathRange(GrGpu*, PathGenerator*);

    /**
     * Initialize an eager-loaded path range. The subclass is responsible for ensuring all
     * the paths are initialized up front.
     */
    GrPathRange(GrGpu*, int numPaths);

    int getNumPaths() const { return fNumPaths; }
    const PathGenerator* getPathGenerator() const { return fPathGenerator.get(); }

    void loadPathsIfNeeded(const void* indices, PathIndexType, int count) const;

    template<typename IndexType> void loadPathsIfNeeded(const IndexType* indices, int count) const {
        if (!fPathGenerator) {
            return;
        }

        bool didLoadPaths = false;

        for (int i = 0; i < count; ++i) {
            SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths));

            const int groupIndex = indices[i] / kPathsPerGroup;
            const int groupByte = groupIndex / 8;
            const uint8_t groupBit = 1 << (groupIndex % 8);

            const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
            if (!hasPath) {
                // We track which paths are loaded in groups of kPathsPerGroup. To
                // mark a path as loaded we need to load the entire group.
                const int groupFirstPath = groupIndex * kPathsPerGroup;
                const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;

                SkPath path;
                for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
                    fPathGenerator->generatePath(pathIdx, &path);
                    this->onInitPath(pathIdx, path);
                }

                fGeneratedPaths[groupByte] |= groupBit;
                didLoadPaths = true;
            }
        }

        if (didLoadPaths) {
            this->didChangeGpuMemorySize();
        }
    }
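    // Example usage (a sketch; 'range' and the index array are assumed to come
    // from the caller's draw setup):
    //
    //   uint16_t indices[] = { 3, 7, 42 };
    //   range->loadPathsIfNeeded(indices, SK_ARRAY_COUNT(indices));
    //
    // Each referenced path's group of kPathsPerGroup paths is generated via the
    // PathGenerator and handed to onInitPath() before drawing.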

#ifdef SK_DEBUG
    void assertPathsLoaded(const void* indices, PathIndexType, int count) const;

    template<typename IndexType> void assertPathsLoaded(const IndexType* indices, int count) const {
        if (!fPathGenerator) {
            return;
        }

        for (int i = 0; i < count; ++i) {
            SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths));

            const int groupIndex = indices[i] / kPathsPerGroup;
            const int groupByte = groupIndex / 8;
            const uint8_t groupBit = 1 << (groupIndex % 8);

            SkASSERT(fGeneratedPaths[groupByte] & groupBit);
        }
    }

    virtual bool isEqualTo(const SkDescriptor& desc) const {
        return nullptr != fPathGenerator.get() && fPathGenerator->isEqualTo(desc);
    }
#endif

protected:
    // Initialize a path in the range before drawing. This is only called when
    // fPathGenerator is non-null. The child class need not call didChangeGpuMemorySize(),
    // GrPathRange will take care of that after the call is complete.
    virtual void onInitPath(int index, const SkPath&) const = 0;
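    // A hypothetical override might look like this (the actual GPU upload is
    // backend-specific and outside the scope of this class):
    //
    //   void onInitPath(int index, const SkPath& skPath) const override {
    //       // Translate skPath's verbs and points into the backend path
    //       // object stored at 'index' within this range.
    //   }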

private:
    enum {
        kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading.
    };

    mutable sk_sp<PathGenerator> fPathGenerator;
    mutable SkTArray<uint8_t, true /*MEM_COPY*/> fGeneratedPaths;
    const int fNumPaths;

    typedef GrGpuResource INHERITED;
};

#endif