blob: 5e71014ce8b5fd78d091a8090a0579fbacf77113 [file] [log] [blame]
cdalton855d83f2014-09-18 13:51:53 -07001/*
2 * Copyright 2014 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrPathRange.h"
9#include "SkPath.h"
10
// Paths are tracked for lazy loading in fixed-size groups: one bookkeeping bit
// covers kPathsPerGroup consecutive paths (see willDrawPaths below).
enum { kPathsPerGroup = 16 };
14
15GrPathRange::GrPathRange(GrGpu* gpu,
kkinnunen50b58e62015-05-18 23:02:07 -070016 PathGenerator* pathGenerator)
bsalomon5236cf42015-01-14 10:42:08 -080017 : INHERITED(gpu, kCached_LifeCycle),
cdalton855d83f2014-09-18 13:51:53 -070018 fPathGenerator(SkRef(pathGenerator)),
kkinnunen50b58e62015-05-18 23:02:07 -070019 fNumPaths(fPathGenerator->getNumPaths()) {
cdalton855d83f2014-09-18 13:51:53 -070020 const int numGroups = (fNumPaths + kPathsPerGroup - 1) / kPathsPerGroup;
21 fGeneratedPaths.reset((numGroups + 7) / 8); // 1 bit per path group.
22 memset(&fGeneratedPaths.front(), 0, fGeneratedPaths.count());
23}
24
25GrPathRange::GrPathRange(GrGpu* gpu,
kkinnunen50b58e62015-05-18 23:02:07 -070026 int numPaths)
bsalomon5236cf42015-01-14 10:42:08 -080027 : INHERITED(gpu, kCached_LifeCycle),
kkinnunen50b58e62015-05-18 23:02:07 -070028 fNumPaths(numPaths) {
cdalton855d83f2014-09-18 13:51:53 -070029}
30
cdalton55b24af2014-11-25 11:00:56 -080031void GrPathRange::willDrawPaths(const void* indices, PathIndexType indexType, int count) const {
32 if (!fPathGenerator) {
cdalton855d83f2014-09-18 13:51:53 -070033 return;
34 }
35
cdalton55b24af2014-11-25 11:00:56 -080036 switch (indexType) {
37 case kU8_PathIndexType: return this->willDrawPaths<uint8_t>(indices, count);
38 case kU16_PathIndexType: return this->willDrawPaths<uint16_t>(indices, count);
39 case kU32_PathIndexType: return this->willDrawPaths<uint32_t>(indices, count);
40 default: SkFAIL("Unknown path index type");
41 }
42}
43
44template<typename IndexType> void GrPathRange::willDrawPaths(const void* indices, int count) const {
45 SkASSERT(fPathGenerator);
46
47 const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
cdalton855d83f2014-09-18 13:51:53 -070048 bool didLoadPaths = false;
49
50 for (int i = 0; i < count; ++i) {
cdalton55b24af2014-11-25 11:00:56 -080051 SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
cdalton855d83f2014-09-18 13:51:53 -070052
cdalton55b24af2014-11-25 11:00:56 -080053 const int groupIndex = indexArray[i] / kPathsPerGroup;
cdalton855d83f2014-09-18 13:51:53 -070054 const int groupByte = groupIndex / 8;
55 const uint8_t groupBit = 1 << (groupIndex % 8);
56
57 const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
58 if (!hasPath) {
59 // We track which paths are loaded in groups of kPathsPerGroup. To
60 // mark a path as loaded we need to load the entire group.
61 const int groupFirstPath = groupIndex * kPathsPerGroup;
62 const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;
63
64 SkPath path;
65 for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
66 fPathGenerator->generatePath(pathIdx, &path);
67 this->onInitPath(pathIdx, path);
68 }
69
70 fGeneratedPaths[groupByte] |= groupBit;
71 didLoadPaths = true;
72 }
73 }
74
75 if (didLoadPaths) {
76 this->didChangeGpuMemorySize();
77 }
78}