/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCPerFlushResources.h"

#include "include/private/GrRecordingContext.h"
#include "src/core/SkMakeUnique.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrShape.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrGSCoverageProcessor.h"
#include "src/gpu/ccpr/GrVSCoverageProcessor.h"

using FillBatchID = GrCCFiller::BatchID;
using StrokeBatchID = GrCCStroker::BatchID;
using PathInstance = GrCCPathProcessor::Instance;

static constexpr int kFillIdx = GrCCPerFlushResourceSpecs::kFillIdx;
static constexpr int kStrokeIdx = GrCCPerFlushResourceSpecs::kStrokeIdx;

namespace {

// Base class for an Op that renders a CCPR atlas.
class AtlasOp : public GrDrawOp {
public:
    FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
    GrProcessorSet::Analysis finalize(
            const GrCaps&, const GrAppliedClip*, GrFSAAType, GrClampType) override {
        return GrProcessorSet::EmptySetAnalysis();
    }
    CombineResult onCombineIfPossible(GrOp* other, const GrCaps&) override {
        // We will only make multiple copy ops if they have different source proxies.
        // TODO: make use of texture chaining.
        return CombineResult::kCannotCombine;
    }
    void onPrepare(GrOpFlushState*) override {}

protected:
    AtlasOp(uint32_t classID, sk_sp<const GrCCPerFlushResources> resources,
            const SkISize& drawBounds)
            : GrDrawOp(classID)
            , fResources(std::move(resources)) {
        this->setBounds(SkRect::MakeIWH(drawBounds.width(), drawBounds.height()),
                        GrOp::HasAABloat::kNo, GrOp::IsZeroArea::kNo);
    }

    const sk_sp<const GrCCPerFlushResources> fResources;
};
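
// (AtlasOp holds fResources by sk_sp, which keeps the per-flush instance, index, and vertex
// buffers referenced in onExecute() alive until the atlas ops actually run during the flush.)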

// Copies paths from a cached coverage count atlas into an 8-bit literal-coverage atlas.
class CopyAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          sk_sp<GrTextureProxy> copyProxy, int baseInstance,
                                          int endInstance, const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<CopyAtlasOp>(std::move(resources), std::move(copyProxy),
                                           baseInstance, endInstance, drawBounds);
    }

    const char* name() const override { return "CopyAtlasOp (CCPR)"; }
    void visitProxies(const VisitProxyFunc& fn, VisitorType) const override { fn(fSrcProxy.get()); }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        SkASSERT(fSrcProxy);
        GrPipeline::FixedDynamicState dynamicState;
        auto srcProxy = fSrcProxy.get();
        dynamicState.fPrimitiveProcessorTextures = &srcProxy;

        GrPipeline pipeline(GrScissorTest::kDisabled, SkBlendMode::kSrc);
        GrCCPathProcessor pathProc(srcProxy);
        pathProc.drawPaths(flushState, pipeline, &dynamicState, *fResources, fBaseInstance,
                           fEndInstance, this->bounds());
    }

private:
    friend class ::GrOpMemoryPool;  // for ctor

    CopyAtlasOp(sk_sp<const GrCCPerFlushResources> resources, sk_sp<GrTextureProxy> srcProxy,
                int baseInstance, int endInstance, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fSrcProxy(srcProxy)
            , fBaseInstance(baseInstance)
            , fEndInstance(endInstance) {
    }

    sk_sp<GrTextureProxy> fSrcProxy;
    const int fBaseInstance;
    const int fEndInstance;
};

// Renders coverage counts to a CCPR atlas using the resources' pre-filled GrCCPathParser.
template<typename ProcessorType> class RenderAtlasOp : public AtlasOp {
public:
    DEFINE_OP_CLASS_ID

    static std::unique_ptr<GrDrawOp> Make(GrRecordingContext* context,
                                          sk_sp<const GrCCPerFlushResources> resources,
                                          FillBatchID fillBatchID, StrokeBatchID strokeBatchID,
                                          const SkISize& drawBounds) {
        GrOpMemoryPool* pool = context->priv().opMemoryPool();

        return pool->allocate<RenderAtlasOp>(std::move(resources), fillBatchID, strokeBatchID,
                                             drawBounds);
    }

    // GrDrawOp interface.
    const char* name() const override { return "RenderAtlasOp (CCPR)"; }

    void onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) override {
        ProcessorType proc;
        fResources->filler().drawFills(flushState, &proc, fFillBatchID, fDrawBounds);
        fResources->stroker().drawStrokes(flushState, &proc, fStrokeBatchID, fDrawBounds);
    }

private:
    friend class ::GrOpMemoryPool;  // for ctor

    RenderAtlasOp(sk_sp<const GrCCPerFlushResources> resources, FillBatchID fillBatchID,
                  StrokeBatchID strokeBatchID, const SkISize& drawBounds)
            : AtlasOp(ClassID(), std::move(resources), drawBounds)
            , fFillBatchID(fillBatchID)
            , fStrokeBatchID(strokeBatchID)
            , fDrawBounds(SkIRect::MakeWH(drawBounds.width(), drawBounds.height())) {
    }

    const FillBatchID fFillBatchID;
    const StrokeBatchID fStrokeBatchID;
    const SkIRect fDrawBounds;
};

}  // namespace

static int inst_buffer_count(const GrCCPerFlushResourceSpecs& specs) {
    return specs.fNumCachedPaths +
           // Copies get two instances per draw: 1 copy + 1 draw.
           (specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]) * 2 +
           specs.fNumRenderedPaths[kFillIdx] + specs.fNumRenderedPaths[kStrokeIdx];
           // No clips in instance buffers.
}
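
// A hypothetical sizing example: with 4 cached paths, 2 copied fills, 1 copied stroke, and
// 10 rendered fills + 3 rendered strokes, the instance buffer holds
// 4 + (2 + 1)*2 + 10 + 3 = 23 instances. Copy instances occupy the front of the buffer and
// path instances follow, which is why the constructor below starts fNextCopyInstanceIdx at 0
// and fNextPathInstanceIdx at the total copied-path count.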

GrCCPerFlushResources::GrCCPerFlushResources(GrOnFlushResourceProvider* onFlushRP,
                                             const GrCCPerFlushResourceSpecs& specs)
        // Overallocate by one point so we can call Sk4f::Store at the final SkPoint in the array.
        // (See transform_path_pts below.)
        // FIXME: instead use built-in instructions to write only the first two lanes of an Sk4f.
        : fLocalDevPtsBuffer(SkTMax(specs.fRenderedPathStats[kFillIdx].fMaxPointsPerPath,
                                    specs.fRenderedPathStats[kStrokeIdx].fMaxPointsPerPath) + 1)
        , fFiller(specs.fNumRenderedPaths[kFillIdx] + specs.fNumClipPaths,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkPoints,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalSkVerbs,
                  specs.fRenderedPathStats[kFillIdx].fNumTotalConicWeights)
        , fStroker(specs.fNumRenderedPaths[kStrokeIdx],
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkPoints,
                   specs.fRenderedPathStats[kStrokeIdx].fNumTotalSkVerbs)
        , fCopyAtlasStack(GrCCAtlas::CoverageType::kA8_LiteralCoverage, specs.fCopyAtlasSpecs,
                          onFlushRP->caps())
        , fRenderedAtlasStack(GrCCAtlas::CoverageType::kFP16_CoverageCount,
                              specs.fRenderedAtlasSpecs, onFlushRP->caps())
        , fIndexBuffer(GrCCPathProcessor::FindIndexBuffer(onFlushRP))
        , fVertexBuffer(GrCCPathProcessor::FindVertexBuffer(onFlushRP))
        , fInstanceBuffer(onFlushRP->makeBuffer(GrGpuBufferType::kVertex,
                                                inst_buffer_count(specs) * sizeof(PathInstance)))
        , fNextCopyInstanceIdx(0)
        , fNextPathInstanceIdx(specs.fNumCopiedPaths[kFillIdx] +
                               specs.fNumCopiedPaths[kStrokeIdx]) {
    if (!fIndexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR index buffer. No paths will be drawn.\n");
        return;
    }
    if (!fVertexBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR vertex buffer. No paths will be drawn.\n");
        return;
    }
    if (!fInstanceBuffer) {
        SkDebugf("WARNING: failed to allocate CCPR instance buffer. No paths will be drawn.\n");
        return;
    }
    fPathInstanceData = static_cast<PathInstance*>(fInstanceBuffer->map());
    SkASSERT(fPathInstanceData);
    SkDEBUGCODE(fEndCopyInstance =
                        specs.fNumCopiedPaths[kFillIdx] + specs.fNumCopiedPaths[kStrokeIdx]);
    SkDEBUGCODE(fEndPathInstance = inst_buffer_count(specs));
}

void GrCCPerFlushResources::upgradeEntryToLiteralCoverageAtlas(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP, GrCCPathCacheEntry* entry,
        GrCCPathProcessor::DoEvenOddFill evenOdd) {
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;
    SkASSERT(this->isMapped());
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
    SkASSERT(cachedAtlas);
    SkASSERT(cachedAtlas->getOnFlushProxy());

    if (GrCCAtlas::CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
        // This entry has already been upgraded to literal coverage. The path must have been drawn
        // multiple times during the flush.
        SkDEBUGCODE(--fEndCopyInstance);
        return;
    }

    SkIVector newAtlasOffset;
    if (GrCCAtlas* retiredAtlas = fCopyAtlasStack.addRect(entry->devIBounds(), &newAtlasOffset)) {
        // We did not fit in the previous copy atlas and it was retired. We will render the ranges
        // up until fCopyPathRanges.count() into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }

    this->recordCopyPathInstance(*entry, newAtlasOffset, evenOdd,
                                 sk_ref_sp(cachedAtlas->getOnFlushProxy()));

    sk_sp<GrTexture> previousAtlasTexture =
            sk_ref_sp(cachedAtlas->getOnFlushProxy()->peekTexture());
    GrCCAtlas* newAtlas = &fCopyAtlasStack.current();
    if (ReleaseAtlasResult::kDidInvalidateFromCache ==
            entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, newAtlas, newAtlasOffset)) {
        // This texture just got booted out of the cache. Keep it around in case we can recycle it
        // for a new atlas. Recycling is safe because copying happens before rendering new paths,
        // and every path from this atlas that we plan to use this flush will be copied to a new
        // atlas. We will never copy some and leave others.
        fRecyclableAtlasTextures.push_back(std::move(previousAtlasTexture));
    }
}

template<typename T, typename... Args>
static void emplace_at_memcpy(SkTArray<T>* array, int idx, Args&&... args) {
    if (int moveCount = array->count() - idx) {
        array->push_back();
        T* location = array->begin() + idx;
        // The source and destination ranges overlap, so this must be memmove, not memcpy.
        memmove(location+1, location, moveCount * sizeof(T));
        new (location) T(std::forward<Args>(args)...);
    } else {
        array->emplace_back(std::forward<Args>(args)...);
    }
}
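
// A sketch of the relocation above, inserting X at idx=1 into [A, B, C]:
//
//   push_back()            -> [A, B, C, _]  (default-constructed tail slot)
//   memmove(loc+1, loc, 2) -> [A, B, B, C]  (bitwise shift; slot idx now holds a
//                                            stale duplicate that is never destroyed)
//   new (loc) T(X...)      -> [A, X, B, C]  (placement-new over the duplicate)
//
// This is only safe for types that can be relocated bitwise; CopyPathRange's sk_sp qualifies
// because the shifted copy simply adopts the duplicate's reference.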

void GrCCPerFlushResources::recordCopyPathInstance(const GrCCPathCacheEntry& entry,
                                                   const SkIVector& newAtlasOffset,
                                                   GrCCPathProcessor::DoEvenOddFill evenOdd,
                                                   sk_sp<GrTextureProxy> srcProxy) {
    SkASSERT(fNextCopyInstanceIdx < fEndCopyInstance);

    // Write the instance at the back of the array.
    int currentInstanceIdx = fNextCopyInstanceIdx++;
    constexpr uint64_t kWhite = (((uint64_t) SK_Half1) <<  0) |
                                (((uint64_t) SK_Half1) << 16) |
                                (((uint64_t) SK_Half1) << 32) |
                                (((uint64_t) SK_Half1) << 48);
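    // (SK_Half1 is 0x3C00, the half-precision bit pattern for 1.0, so kWhite packs opaque white
    // as four FP16 channels: 0x3C00'3C00'3C00'3C00.)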
    fPathInstanceData[currentInstanceIdx].set(entry, newAtlasOffset, kWhite, evenOdd);

    // Percolate the instance forward until it's contiguous with other instances that share the
    // same proxy.
    for (int i = fCopyPathRanges.count() - 1; i >= fCurrCopyAtlasRangesIdx; --i) {
        if (fCopyPathRanges[i].fSrcProxy == srcProxy) {
            ++fCopyPathRanges[i].fCount;
            return;
        }
        int rangeFirstInstanceIdx = currentInstanceIdx - fCopyPathRanges[i].fCount;
        std::swap(fPathInstanceData[rangeFirstInstanceIdx], fPathInstanceData[currentInstanceIdx]);
        currentInstanceIdx = rangeFirstInstanceIdx;
    }

    // An instance with this particular proxy did not yet exist in the array. Add a range for it.
    emplace_at_memcpy(&fCopyPathRanges, fCurrCopyAtlasRangesIdx, std::move(srcProxy), 1);
}
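
// Keeping each srcProxy's copy instances contiguous is what lets finalize() below issue one
// CopyAtlasOp per CopyPathRange rather than one per copied path.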

static bool transform_path_pts(const SkMatrix& m, const SkPath& path,
                               const SkAutoSTArray<32, SkPoint>& outDevPts, SkRect* devBounds,
                               SkRect* devBounds45) {
    const SkPoint* pts = SkPathPriv::PointData(path);
    int numPts = path.countPoints();
    SkASSERT(numPts + 1 <= outDevPts.count());
    SkASSERT(numPts);

    // m45 transforms path points into "45 degree" device space. A bounding box in this space gives
    // the circumscribing octagon's diagonals. We could use SK_ScalarRoot2Over2, but an orthonormal
    // transform is not necessary as long as the shader uses the correct inverse.
    SkMatrix m45;
    m45.setSinCos(1, 1);
    m45.preConcat(m);
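
    // (setSinCos(1, 1) builds | 1 -1 |, so after the preConcat, m45 maps a device-space point
    //                         | 1  1 |
    // (x, y) to (x - y, x + y): a 45-degree rotation scaled by sqrt(2). Per the comment above,
    // the extra scale is harmless as long as the shader applies the matching inverse.)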

    // X,Y,T are two parallel view matrices that accumulate two bounding boxes as they map points:
    // device-space bounds and "45 degree" device-space bounds (| 1 -1 | * devCoords).
    //                                                          | 1  1 |
    Sk4f X = Sk4f(m.getScaleX(), m.getSkewY(), m45.getScaleX(), m45.getSkewY());
    Sk4f Y = Sk4f(m.getSkewX(), m.getScaleY(), m45.getSkewX(), m45.getScaleY());
    Sk4f T = Sk4f(m.getTranslateX(), m.getTranslateY(), m45.getTranslateX(), m45.getTranslateY());

    // Map the path's points to device space and accumulate bounding boxes.
    Sk4f devPt = SkNx_fma(Y, Sk4f(pts[0].y()), T);
    devPt = SkNx_fma(X, Sk4f(pts[0].x()), devPt);
    Sk4f topLeft = devPt;
    Sk4f bottomRight = devPt;

    // Store all 4 values [dev.x, dev.y, dev45.x, dev45.y]. We are only interested in the first
    // two, and will overwrite [dev45.x, dev45.y] with the next point. This is why the dst buffer
    // must be at least one larger than the number of points.
    devPt.store(&outDevPts[0]);

    for (int i = 1; i < numPts; ++i) {
        devPt = SkNx_fma(Y, Sk4f(pts[i].y()), T);
        devPt = SkNx_fma(X, Sk4f(pts[i].x()), devPt);
        topLeft = Sk4f::Min(topLeft, devPt);
        bottomRight = Sk4f::Max(bottomRight, devPt);
        devPt.store(&outDevPts[i]);
    }

    if (!(Sk4f(0) == topLeft*0).allTrue() || !(Sk4f(0) == bottomRight*0).allTrue()) {
        // The bounds are infinite or NaN.
        return false;
    }
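
    // (For any finite x, x*0 is +/-0 and compares equal to 0, while inf*0 and NaN*0 are both NaN
    // and compare unequal. So the check above rejects exactly the non-finite bounds.)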

    SkPoint topLeftPts[2], bottomRightPts[2];
    topLeft.store(topLeftPts);
    bottomRight.store(bottomRightPts);
    devBounds->setLTRB(topLeftPts[0].x(), topLeftPts[0].y(), bottomRightPts[0].x(),
                       bottomRightPts[0].y());
    devBounds45->setLTRB(topLeftPts[1].x(), topLeftPts[1].y(), bottomRightPts[1].x(),
                         bottomRightPts[1].y());
    return true;
}

GrCCAtlas* GrCCPerFlushResources::renderShapeInAtlas(
        const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
        SkRect* devBounds, SkRect* devBounds45, SkIRect* devIBounds, SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx < fEndPathInstance);

    SkPath path;
    shape.asPath(&path);
    if (path.isEmpty()) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }
    if (!transform_path_pts(m, path, fLocalDevPtsBuffer, devBounds, devBounds45)) {
        // The transformed path had infinite or NaN bounds.
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;
    }

    const SkStrokeRec& stroke = shape.style().strokeRec();
    if (!stroke.isFillStyle()) {
        float r = SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                  stroke.getCap(), strokeDevWidth);
        devBounds->outset(r, r);
        // devBounds45 is in (| 1 -1 | * devCoords) space.
        //                    | 1  1 |
        devBounds45->outset(r*SK_ScalarSqrt2, r*SK_ScalarSqrt2);
    }
    devBounds->roundOut(devIBounds);

    GrScissorTest scissorTest;
    SkIRect clippedPathIBounds;
    if (!this->placeRenderedPathInAtlas(clipIBounds, *devIBounds, &scissorTest, &clippedPathIBounds,
                                        devToAtlasOffset)) {
        SkDEBUGCODE(--fEndPathInstance);
        return nullptr;  // Path was degenerate or clipped away.
    }

    if (stroke.isFillStyle()) {
        SkASSERT(0 == strokeDevWidth);
        fFiller.parseDeviceSpaceFill(path, fLocalDevPtsBuffer.begin(), scissorTest,
                                     clippedPathIBounds, *devToAtlasOffset);
    } else {
        // Stroke-and-fill is not yet supported.
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle() || stroke.isHairlineStyle());
        SkASSERT(!stroke.isHairlineStyle() || 1 == strokeDevWidth);
        fStroker.parseDeviceSpaceStroke(path, fLocalDevPtsBuffer.begin(), stroke, strokeDevWidth,
                                        scissorTest, clippedPathIBounds, *devToAtlasOffset);
    }
    return &fRenderedAtlasStack.current();
}

const GrCCAtlas* GrCCPerFlushResources::renderDeviceSpacePathInAtlas(
        const SkIRect& clipIBounds, const SkPath& devPath, const SkIRect& devPathIBounds,
        SkIVector* devToAtlasOffset) {
    SkASSERT(this->isMapped());

    if (devPath.isEmpty()) {
        return nullptr;
    }

    GrScissorTest scissorTest;
    SkIRect clippedPathIBounds;
    if (!this->placeRenderedPathInAtlas(clipIBounds, devPathIBounds, &scissorTest,
                                        &clippedPathIBounds, devToAtlasOffset)) {
        return nullptr;
    }

    fFiller.parseDeviceSpaceFill(devPath, SkPathPriv::PointData(devPath), scissorTest,
                                 clippedPathIBounds, *devToAtlasOffset);
    return &fRenderedAtlasStack.current();
}

bool GrCCPerFlushResources::placeRenderedPathInAtlas(const SkIRect& clipIBounds,
                                                     const SkIRect& pathIBounds,
                                                     GrScissorTest* scissorTest,
                                                     SkIRect* clippedPathIBounds,
                                                     SkIVector* devToAtlasOffset) {
    if (clipIBounds.contains(pathIBounds)) {
        *clippedPathIBounds = pathIBounds;
        *scissorTest = GrScissorTest::kDisabled;
    } else if (clippedPathIBounds->intersect(clipIBounds, pathIBounds)) {
        *scissorTest = GrScissorTest::kEnabled;
    } else {
        return false;
    }

    if (GrCCAtlas* retiredAtlas =
                fRenderedAtlasStack.addRect(*clippedPathIBounds, devToAtlasOffset)) {
        // We did not fit in the previous coverage count atlas and it was retired. Close the path
        // parser's current batch (which does not yet include the path we just parsed). We will
        // render this batch into the retired atlas during finalize().
        retiredAtlas->setFillBatchID(fFiller.closeCurrentBatch());
        retiredAtlas->setStrokeBatchID(fStroker.closeCurrentBatch());
    }
    return true;
}

bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP,
                                     SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    SkASSERT(this->isMapped());
    SkASSERT(fNextPathInstanceIdx == fEndPathInstance);
    SkASSERT(fNextCopyInstanceIdx == fEndCopyInstance);

    fInstanceBuffer->unmap();
    fPathInstanceData = nullptr;

    if (!fCopyAtlasStack.empty()) {
        fCopyAtlasStack.current().setFillBatchID(fCopyPathRanges.count());
        fCurrCopyAtlasRangesIdx = fCopyPathRanges.count();
    }
    if (!fRenderedAtlasStack.empty()) {
        fRenderedAtlasStack.current().setFillBatchID(fFiller.closeCurrentBatch());
        fRenderedAtlasStack.current().setStrokeBatchID(fStroker.closeCurrentBatch());
    }

    // Build the GPU buffers to render path coverage counts. (This must not happen until after the
    // final calls to fFiller/fStroker.closeCurrentBatch().)
    if (!fFiller.prepareToDraw(onFlushRP)) {
        return false;
    }
    if (!fStroker.prepareToDraw(onFlushRP)) {
        return false;
    }

    // Draw the copies from the 16-bit coverage count atlas(es) into the 8-bit literal-coverage
    // cached atlas(es).
    int copyRangeIdx = 0;
    int baseCopyInstance = 0;
    for (GrCCAtlasStack::Iter atlas(fCopyAtlasStack); atlas.next();) {
        int endCopyRange = atlas->getFillBatchID();
        SkASSERT(endCopyRange > copyRangeIdx);

        sk_sp<GrRenderTargetContext> rtc = atlas->makeRenderTargetContext(onFlushRP);
        for (; copyRangeIdx < endCopyRange; ++copyRangeIdx) {
            const CopyPathRange& copyRange = fCopyPathRanges[copyRangeIdx];
            int endCopyInstance = baseCopyInstance + copyRange.fCount;
            if (rtc) {
                auto op = CopyAtlasOp::Make(rtc->surfPriv().getContext(), sk_ref_sp(this),
                                            copyRange.fSrcProxy, baseCopyInstance, endCopyInstance,
                                            atlas->drawBounds());
                rtc->addDrawOp(GrNoClip(), std::move(op));
            }
            baseCopyInstance = endCopyInstance;
        }
        out->push_back(std::move(rtc));
    }
    SkASSERT(fCopyPathRanges.count() == copyRangeIdx);
    SkASSERT(fNextCopyInstanceIdx == baseCopyInstance);
    SkASSERT(baseCopyInstance == fEndCopyInstance);

    // Render the coverage count atlas(es).
    for (GrCCAtlasStack::Iter atlas(fRenderedAtlasStack); atlas.next();) {
        // Copies will be finished by the time we get to rendering new atlases. See if we can
        // recycle any previously invalidated atlas textures instead of creating new ones.
        sk_sp<GrTexture> backingTexture;
        for (sk_sp<GrTexture>& texture : fRecyclableAtlasTextures) {
            if (texture && atlas->currentHeight() == texture->height() &&
                    atlas->currentWidth() == texture->width()) {
                backingTexture = skstd::exchange(texture, nullptr);
                break;
            }
        }

        if (auto rtc = atlas->makeRenderTargetContext(onFlushRP, std::move(backingTexture))) {
            std::unique_ptr<GrDrawOp> op;
            if (onFlushRP->caps()->shaderCaps()->geometryShaderSupport()) {
                op = RenderAtlasOp<GrGSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            } else {
                op = RenderAtlasOp<GrVSCoverageProcessor>::Make(
                        rtc->surfPriv().getContext(), sk_ref_sp(this), atlas->getFillBatchID(),
                        atlas->getStrokeBatchID(), atlas->drawBounds());
            }
            rtc->addDrawOp(GrNoClip(), std::move(op));
            out->push_back(std::move(rtc));
        }
    }

    return true;
}

void GrCCPerFlushResourceSpecs::cancelCopies() {
    // Convert copies to cached draws.
    fNumCachedPaths += fNumCopiedPaths[kFillIdx] + fNumCopiedPaths[kStrokeIdx];
    fNumCopiedPaths[kFillIdx] = fNumCopiedPaths[kStrokeIdx] = 0;
    fCopyPathStats[kFillIdx] = fCopyPathStats[kStrokeIdx] = GrCCRenderedPathStats();
    fCopyAtlasSpecs = GrCCAtlas::Specs();
}