/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

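// Returns true if any fragment processor in the paint requires local coordinates. When none
// do, the op can substitute the identity view matrix, which keeps it eligible to batch with
// draws made under different view matrices (see fViewMatrixIfUsingLocalCoords below).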
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

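// 64-bit rect area. sk_64_mul keeps the width*height product from overflowing 32-bit math on
// very large device-space bounds.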
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

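// Factory method. Classifies how much of the path's device-space mask survives the clip:
//
//   kComplete:       the clip fully contains the path's device bounds.
//   kMostlyComplete: at least half the path's area is visible, or the path is small
//                    (under 100x100 px); worth rendering whole if the mask might be cached.
//   kPartial:        everything else; only the clipped portion gets a mask.
//
// For example, a 200x200 path with only a 90x200 sliver inside the clip has clippedArea
// 18,000 < unclippedArea/2 = 20,000 and is not small, so it is classified kPartial.
//
// Returns null if the clipped mask bounds come up empty (nothing visible to draw).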
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
                                                       const SkIRect& clipIBounds,
                                                       const SkMatrix& m,
                                                       const GrShape& shape,
                                                       const SkRect& devBounds,
                                                       GrPaint&& paint) {
    SkIRect shapeDevIBounds;
    devBounds.roundOut(&shapeDevIBounds);  // GrCCPathParser might find slightly tighter bounds.

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeDevIBounds)) {
        maskDevIBounds = shapeDevIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeDevIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeDevIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, shapeDevIBounds, maskDevIBounds,
                                           maskVisibility, devBounds, std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape,
                                 const SkIRect& shapeDevIBounds, const SkIRect& maskDevIBounds,
                                 Visibility maskVisibility, const SkRect& devBounds,
                                 GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, shapeDevIBounds, maskDevIBounds, maskVisibility, paint.getColor())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        const SkIRect& shapeDevIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        GrColor color)
        : fMatrix(m)
        , fShape(shape)
        , fShapeDevIBounds(shapeDevIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
    if (fCacheEntry) {
        // All currFlushAtlas references must be reset back to null before the flush is finished.
        fCacheEntry->setCurrFlushAtlas(nullptr);
    }
}

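// Single-draw ops analyze their processors against single-channel coverage here; the analysis
// may also replace the draw's color with a constant, which is why fColor is passed as both
// input and output.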
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, &fDraws.head().fColor);
    return RequiresDstTexture(analysis.requiresDstTexture());
}

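// Two ops can merge only if their processor sets match and they agree on the view matrix used
// for local coords (identity whenever no processor reads local coords). On a merge, "that"
// op's draws are spliced into this op's list and its bounds are joined with ours.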
GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
    this->joinBounds(*that);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

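// Called when this op is recorded into an opList. CCPR tracks its draw ops in a per-opList
// side list so it can build their atlases at flush time; the destructor above unlinks us from
// that list again.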
void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    owningPerOpListPaths->fDrawOps.addToTail(this);
    fOwningPerOpListPaths = owningPerOpListPaths;
}

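// First stage of the flush: decide which of three routes each draw will take through
// setupResources(), and tally the atlas space each route will need:
//
//   1) The mask already lives in a cached 8-bit atlas -> just count it.
//   2) The mask lives in the atlas stashed from last flush -> plan to copy it into a
//      permanent cached atlas.
//   3) Otherwise -> plan to render it into this flush's coverage count atlas.
//
// As a side effect, a mostly-visible draw whose cache entry has multiple hits is promoted to
// fully visible, so the whole mask can be rendered and cached this time around.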
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                      cacheEntry->atlasKey(),
                                                      GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1 &&
                SkTMax(draw.fShapeDevIBounds.height(),
                       draw.fShapeDevIBounds.width()) <= onFlushRP->caps()->maxRenderTargetSize()) {
                // We've seen this path before with a compatible matrix, and it's mostly visible.
                // Just render the whole mask so we can try to cache it.
                draw.fMaskDevIBounds = draw.fShapeDevIBounds;
                draw.fMaskVisibility = Visibility::kComplete;
            }
        }

        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}

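// Second stage of the flush: emit one GPU instance per draw, following the plan made in
// accountForOwnPaths(). Draws that sample different atlas proxies are grouped into separate
// InstanceRanges via recordInstance().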
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                onFlushRP->contextUniqueID(), newOffset,
                                                atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: one in device space, and a second rotated an additional 45 degrees. The
        // path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fMaskDevIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, onFlushRP->contextUniqueID(),
                                               devToAtlasOffset, devBounds, devBounds45, devIBounds,
                                               draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

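// Opens a new InstanceRange whenever the atlas proxy changes, closing out the previous
// range's end index. The last range is closed by setupResources() after the final draw.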
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

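// Issues the actual GPU work: one GrCCPathProcessor draw per InstanceRange, rebinding the
// atlas texture between ranges via the pipeline's fixed dynamic state.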
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}