bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "GrResourceProvider.h" |
| 9 | |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 10 | #include "GrBuffer.h" |
robertphillips | 5fa7f30 | 2016-07-21 09:21:04 -0700 | [diff] [blame] | 11 | #include "GrCaps.h" |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 12 | #include "GrGpu.h" |
kkinnunen | cabe20c | 2015-06-01 01:37:26 -0700 | [diff] [blame] | 13 | #include "GrPathRendering.h" |
egdaniel | ec00d94 | 2015-09-14 12:56:10 -0700 | [diff] [blame] | 14 | #include "GrRenderTarget.h" |
| 15 | #include "GrRenderTargetPriv.h" |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 16 | #include "GrResourceCache.h" |
| 17 | #include "GrResourceKey.h" |
egdaniel | ec00d94 | 2015-09-14 12:56:10 -0700 | [diff] [blame] | 18 | #include "GrStencilAttachment.h" |
Brian Osman | 32342f0 | 2017-03-04 08:12:46 -0500 | [diff] [blame^] | 19 | #include "GrTexturePriv.h" |
| 20 | #include "../private/GrSingleOwner.h" |
halcanary | 4dbbd04 | 2016-06-07 17:21:10 -0700 | [diff] [blame] | 21 | #include "SkMathPriv.h" |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 22 | |
// Unique key under which the shared quad index buffer is stored (defined in the ctor).
GR_DECLARE_STATIC_UNIQUE_KEY(gQuadIndexBufferKey);

// Minimum dimension used when binning scratch texture sizes up to pow2 (see refScratchTexture).
const int GrResourceProvider::kMinScratchTextureSize = 16;

// Debug-only guard: every provider entry point asserts it is used from a single owner.
#define ASSERT_SINGLE_OWNER \
    SkDEBUGCODE(GrSingleOwner::AutoEnforce debug_SingleOwner(fSingleOwner);)
| 29 | |
// Ties the provider to a GrGpu (resource creation) and a GrResourceCache
// (scratch/unique-key lookups). In debug builds all entry points are checked
// against 'owner' via ASSERT_SINGLE_OWNER.
GrResourceProvider::GrResourceProvider(GrGpu* gpu, GrResourceCache* cache, GrSingleOwner* owner)
    : fCache(cache)
    , fGpu(gpu)
#ifdef SK_DEBUG
    , fSingleOwner(owner)
#endif
{
    // Capture the process-wide key used by createQuadIndexBuffer().
    GR_DEFINE_STATIC_UNIQUE_KEY(gQuadIndexBufferKey);
    fQuadIndexBufferKey = gQuadIndexBufferKey;
}
| 40 | |
Brian Osman | 32342f0 | 2017-03-04 08:12:46 -0500 | [diff] [blame^] | 41 | |
// Creates a texture, optionally initialized with 'mipLevelCount' mip levels
// from 'texels' (level 0 first). Returns nullptr on any validation failure or
// if the GPU fails to create the texture.
GrTexture* GrResourceProvider::createMipMappedTexture(const GrSurfaceDesc& desc,
                                                      SkBudgeted budgeted, const GrMipLevel* texels,
                                                      int mipLevelCount, uint32_t flags) {
    ASSERT_SINGLE_OWNER

    if (this->isAbandoned()) {
        return nullptr;
    }
    // A non-zero level count requires a level array...
    if (mipLevelCount && !texels) {
        return nullptr;
    }
    // ...and every provided level must carry pixel data.
    for (int i = 0; i < mipLevelCount; ++i) {
        if (!texels[i].fPixels) {
            return nullptr;
        }
    }
    // Signed-integer configs are rejected when a mip chain is requested.
    if (mipLevelCount > 1 && GrPixelConfigIsSint(desc.fConfig)) {
        return nullptr;
    }
    // Reject render-target requests the backend cannot render to in this config.
    if ((desc.fFlags & kRenderTarget_GrSurfaceFlag) &&
        !fGpu->caps()->isConfigRenderable(desc.fConfig, desc.fSampleCnt > 0)) {
        return nullptr;
    }
    // Uncompressed requests with at most one level may be satisfied from the
    // scratch pool: look for an exact-size texture (kNoCreate so nothing is
    // allocated here) and upload level 0, if present, via writePixels().
    if (!GrPixelConfigIsCompressed(desc.fConfig)) {
        if (mipLevelCount < 2) {
            flags |= kExact_Flag | kNoCreate_Flag;
            if (GrTexture* texture = this->refScratchTexture(desc, flags)) {
                if (!mipLevelCount ||
                    texture->writePixels(0, 0, desc.fWidth, desc.fHeight, desc.fConfig,
                                         texels[0].fPixels, texels[0].fRowBytes)) {
                    // Scratch textures come back budgeted; honor an unbudgeted request.
                    if (SkBudgeted::kNo == budgeted) {
                        texture->resourcePriv().makeUnbudgeted();
                    }
                    return texture;
                }
                // writePixels failed: drop the scratch ref and fall through.
                texture->unref();
            }
        }
    }

    // Fall back to creating a fresh GPU texture with the full level array.
    SkTArray<GrMipLevel> texelsShallowCopy(mipLevelCount);
    for (int i = 0; i < mipLevelCount; ++i) {
        texelsShallowCopy.push_back(texels[i]);
    }
    return fGpu->createTexture(desc, budgeted, texelsShallowCopy);
}
| 88 | |
| 89 | GrTexture* GrResourceProvider::createTexture(const GrSurfaceDesc& desc, SkBudgeted budgeted, |
| 90 | const void* srcData, size_t rowBytes, uint32_t flags) { |
| 91 | GrMipLevel tempTexels; |
| 92 | GrMipLevel* texels = nullptr; |
| 93 | int levelCount = 0; |
| 94 | if (srcData) { |
| 95 | tempTexels.fPixels = srcData; |
| 96 | tempTexels.fRowBytes = rowBytes; |
| 97 | texels = &tempTexels; |
| 98 | levelCount = 1; |
| 99 | } |
| 100 | return this->createMipMappedTexture(desc, budgeted, texels, levelCount, flags); |
| 101 | } |
| 102 | |
| 103 | GrTexture* GrResourceProvider::createApproxTexture(const GrSurfaceDesc& desc, uint32_t flags) { |
| 104 | ASSERT_SINGLE_OWNER |
| 105 | SkASSERT(0 == flags || kNoPendingIO_Flag == flags); |
| 106 | return this->internalCreateApproxTexture(desc, flags); |
| 107 | } |
| 108 | |
| 109 | GrTexture* GrResourceProvider::internalCreateApproxTexture(const GrSurfaceDesc& desc, |
| 110 | uint32_t scratchFlags) { |
| 111 | ASSERT_SINGLE_OWNER |
| 112 | if (this->isAbandoned()) { |
| 113 | return nullptr; |
| 114 | } |
| 115 | // Currently we don't recycle compressed textures as scratch. |
| 116 | if (GrPixelConfigIsCompressed(desc.fConfig)) { |
| 117 | return nullptr; |
| 118 | } else { |
| 119 | return this->refScratchTexture(desc, scratchFlags); |
| 120 | } |
| 121 | } |
| 122 | |
// Finds (or, unless kNoCreate_Flag is set, creates) a texture usable for
// 'inDesc'. Unless kExact_Flag is set, requested dimensions are rounded up
// to pow2 with a minimum to improve scratch-pool reuse.
GrTexture* GrResourceProvider::refScratchTexture(const GrSurfaceDesc& inDesc,
                                                 uint32_t flags) {
    ASSERT_SINGLE_OWNER
    SkASSERT(!this->isAbandoned());
    SkASSERT(!GrPixelConfigIsCompressed(inDesc.fConfig));

    // Copy-on-write: the desc is only duplicated if we need to inflate the size.
    SkTCopyOnFirstWrite<GrSurfaceDesc> desc(inDesc);

    if (fGpu->caps()->reuseScratchTextures() || (desc->fFlags & kRenderTarget_GrSurfaceFlag)) {
        if (!(kExact_Flag & flags)) {
            // bin by pow2 with a reasonable min
            GrSurfaceDesc* wdesc = desc.writable();
            wdesc->fWidth = SkTMax(kMinScratchTextureSize, GrNextPow2(desc->fWidth));
            wdesc->fHeight = SkTMax(kMinScratchTextureSize, GrNextPow2(desc->fHeight));
        }

        GrScratchKey key;
        GrTexturePriv::ComputeScratchKey(*desc, &key);
        // Translate our flags into the cache's pending-IO policy.
        uint32_t scratchFlags = 0;
        if (kNoPendingIO_Flag & flags) {
            scratchFlags = GrResourceCache::kRequireNoPendingIO_ScratchFlag;
        } else if (!(desc->fFlags & kRenderTarget_GrSurfaceFlag)) {
            // If it is not a render target then it will most likely be populated by
            // writePixels() which will trigger a flush if the texture has pending IO.
            scratchFlags = GrResourceCache::kPreferNoPendingIO_ScratchFlag;
        }
        GrGpuResource* resource = fCache->findAndRefScratchResource(key,
                                                                    GrSurface::WorstCaseSize(*desc),
                                                                    scratchFlags);
        if (resource) {
            GrSurface* surface = static_cast<GrSurface*>(resource);
            // Discard stale render-target contents when the backend supports it.
            GrRenderTarget* rt = surface->asRenderTarget();
            if (rt && fGpu->caps()->discardRenderTargetSupport()) {
                rt->discard();
            }
            return surface->asTexture();
        }
    }

    if (!(kNoCreate_Flag & flags)) {
        // No scratch match (or scratch reuse not applicable): create a new budgeted texture.
        return fGpu->createTexture(*desc, SkBudgeted::kYes);
    }

    return nullptr;
}
| 168 | |
| 169 | sk_sp<GrTexture> GrResourceProvider::wrapBackendTexture(const GrBackendTextureDesc& desc, |
| 170 | GrWrapOwnership ownership) { |
| 171 | ASSERT_SINGLE_OWNER |
| 172 | if (this->isAbandoned()) { |
| 173 | return nullptr; |
| 174 | } |
| 175 | return fGpu->wrapBackendTexture(desc, ownership); |
| 176 | } |
| 177 | |
| 178 | sk_sp<GrRenderTarget> GrResourceProvider::wrapBackendRenderTarget( |
| 179 | const GrBackendRenderTargetDesc& desc) |
| 180 | { |
| 181 | ASSERT_SINGLE_OWNER |
| 182 | return this->isAbandoned() ? nullptr |
| 183 | : fGpu->wrapBackendRenderTarget(desc, kBorrow_GrWrapOwnership); |
| 184 | } |
| 185 | |
| 186 | void GrResourceProvider::assignUniqueKeyToResource(const GrUniqueKey& key, |
| 187 | GrGpuResource* resource) { |
| 188 | ASSERT_SINGLE_OWNER |
| 189 | if (this->isAbandoned() || !resource) { |
| 190 | return; |
| 191 | } |
| 192 | resource->resourcePriv().setUniqueKey(key); |
| 193 | } |
| 194 | |
| 195 | GrGpuResource* GrResourceProvider::findAndRefResourceByUniqueKey(const GrUniqueKey& key) { |
| 196 | ASSERT_SINGLE_OWNER |
| 197 | return this->isAbandoned() ? nullptr : fCache->findAndRefUniqueResource(key); |
| 198 | } |
| 199 | |
| 200 | GrTexture* GrResourceProvider::findAndRefTextureByUniqueKey(const GrUniqueKey& key) { |
| 201 | ASSERT_SINGLE_OWNER |
| 202 | GrGpuResource* resource = this->findAndRefResourceByUniqueKey(key); |
| 203 | if (resource) { |
| 204 | GrTexture* texture = static_cast<GrSurface*>(resource)->asTexture(); |
| 205 | SkASSERT(texture); |
| 206 | return texture; |
| 207 | } |
| 208 | return NULL; |
| 209 | } |
| 210 | |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 211 | const GrBuffer* GrResourceProvider::createInstancedIndexBuffer(const uint16_t* pattern, |
| 212 | int patternSize, |
| 213 | int reps, |
| 214 | int vertCount, |
| 215 | const GrUniqueKey& key) { |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 216 | size_t bufferSize = patternSize * reps * sizeof(uint16_t); |
| 217 | |
Brian Salomon | 09d994e | 2016-12-21 11:14:46 -0500 | [diff] [blame] | 218 | // This is typically used in GrMeshDrawOps, so we assume kNoPendingIO. |
cdalton | e2e71c2 | 2016-04-07 18:13:29 -0700 | [diff] [blame] | 219 | GrBuffer* buffer = this->createBuffer(bufferSize, kIndex_GrBufferType, kStatic_GrAccessPattern, |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 220 | kNoPendingIO_Flag); |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 221 | if (!buffer) { |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 222 | return nullptr; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 223 | } |
| 224 | uint16_t* data = (uint16_t*) buffer->map(); |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 225 | bool useTempData = (nullptr == data); |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 226 | if (useTempData) { |
halcanary | 385fe4d | 2015-08-26 13:07:48 -0700 | [diff] [blame] | 227 | data = new uint16_t[reps * patternSize]; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 228 | } |
| 229 | for (int i = 0; i < reps; ++i) { |
| 230 | int baseIdx = i * patternSize; |
| 231 | uint16_t baseVert = (uint16_t)(i * vertCount); |
| 232 | for (int j = 0; j < patternSize; ++j) { |
| 233 | data[baseIdx+j] = baseVert + pattern[j]; |
| 234 | } |
| 235 | } |
| 236 | if (useTempData) { |
| 237 | if (!buffer->updateData(data, bufferSize)) { |
| 238 | buffer->unref(); |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 239 | return nullptr; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 240 | } |
halcanary | 385fe4d | 2015-08-26 13:07:48 -0700 | [diff] [blame] | 241 | delete[] data; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 242 | } else { |
| 243 | buffer->unmap(); |
| 244 | } |
| 245 | this->assignUniqueKeyToResource(key, buffer); |
| 246 | return buffer; |
| 247 | } |
| 248 | |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 249 | const GrBuffer* GrResourceProvider::createQuadIndexBuffer() { |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 250 | static const int kMaxQuads = 1 << 12; // max possible: (1 << 14) - 1; |
| 251 | GR_STATIC_ASSERT(4 * kMaxQuads <= 65535); |
| 252 | static const uint16_t kPattern[] = { 0, 1, 2, 0, 2, 3 }; |
| 253 | |
| 254 | return this->createInstancedIndexBuffer(kPattern, 6, kMaxQuads, 4, fQuadIndexBufferKey); |
| 255 | } |
| 256 | |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 257 | GrPath* GrResourceProvider::createPath(const SkPath& path, const GrStyle& style) { |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 258 | SkASSERT(this->gpu()->pathRendering()); |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 259 | return this->gpu()->pathRendering()->createPath(path, style); |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 260 | } |
| 261 | |
| 262 | GrPathRange* GrResourceProvider::createPathRange(GrPathRange::PathGenerator* gen, |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 263 | const GrStyle& style) { |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 264 | SkASSERT(this->gpu()->pathRendering()); |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 265 | return this->gpu()->pathRendering()->createPathRange(gen, style); |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 266 | } |
| 267 | |
reed | a9322c2 | 2016-04-12 06:47:05 -0700 | [diff] [blame] | 268 | GrPathRange* GrResourceProvider::createGlyphs(const SkTypeface* tf, |
| 269 | const SkScalerContextEffects& effects, |
| 270 | const SkDescriptor* desc, |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 271 | const GrStyle& style) { |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 272 | |
| 273 | SkASSERT(this->gpu()->pathRendering()); |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 274 | return this->gpu()->pathRendering()->createGlyphs(tf, effects, desc, style); |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 275 | } |
| 276 | |
cdalton | e2e71c2 | 2016-04-07 18:13:29 -0700 | [diff] [blame] | 277 | GrBuffer* GrResourceProvider::createBuffer(size_t size, GrBufferType intendedType, |
cdalton | 1bf3e71 | 2016-04-19 10:00:02 -0700 | [diff] [blame] | 278 | GrAccessPattern accessPattern, uint32_t flags, |
| 279 | const void* data) { |
robertphillips | 1b8e1b5 | 2015-06-24 06:54:10 -0700 | [diff] [blame] | 280 | if (this->isAbandoned()) { |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 281 | return nullptr; |
robertphillips | 1b8e1b5 | 2015-06-24 06:54:10 -0700 | [diff] [blame] | 282 | } |
cdalton | d37fe76 | 2016-04-21 07:41:50 -0700 | [diff] [blame] | 283 | if (kDynamic_GrAccessPattern != accessPattern) { |
| 284 | return this->gpu()->createBuffer(size, intendedType, accessPattern, data); |
| 285 | } |
csmartdalton | 485a120 | 2016-07-13 10:16:32 -0700 | [diff] [blame] | 286 | if (!(flags & kRequireGpuMemory_Flag) && |
| 287 | this->gpu()->caps()->preferClientSideDynamicBuffers() && |
| 288 | GrBufferTypeIsVertexOrIndex(intendedType) && |
| 289 | kDynamic_GrAccessPattern == accessPattern) { |
| 290 | return GrBuffer::CreateCPUBacked(this->gpu(), size, intendedType, data); |
| 291 | } |
robertphillips | 1b8e1b5 | 2015-06-24 06:54:10 -0700 | [diff] [blame] | 292 | |
cdalton | d37fe76 | 2016-04-21 07:41:50 -0700 | [diff] [blame] | 293 | // bin by pow2 with a reasonable min |
Robert Phillips | 9e38047 | 2016-10-28 12:15:03 -0400 | [diff] [blame] | 294 | static const size_t MIN_SIZE = 1 << 12; |
| 295 | size_t allocSize = SkTMax(MIN_SIZE, GrNextSizePow2(size)); |
robertphillips | 1b8e1b5 | 2015-06-24 06:54:10 -0700 | [diff] [blame] | 296 | |
cdalton | d37fe76 | 2016-04-21 07:41:50 -0700 | [diff] [blame] | 297 | GrScratchKey key; |
csmartdalton | 485a120 | 2016-07-13 10:16:32 -0700 | [diff] [blame] | 298 | GrBuffer::ComputeScratchKeyForDynamicVBO(allocSize, intendedType, &key); |
cdalton | d37fe76 | 2016-04-21 07:41:50 -0700 | [diff] [blame] | 299 | uint32_t scratchFlags = 0; |
| 300 | if (flags & kNoPendingIO_Flag) { |
| 301 | scratchFlags = GrResourceCache::kRequireNoPendingIO_ScratchFlag; |
| 302 | } else { |
| 303 | scratchFlags = GrResourceCache::kPreferNoPendingIO_ScratchFlag; |
| 304 | } |
| 305 | GrBuffer* buffer = static_cast<GrBuffer*>( |
| 306 | this->cache()->findAndRefScratchResource(key, allocSize, scratchFlags)); |
| 307 | if (!buffer) { |
| 308 | buffer = this->gpu()->createBuffer(allocSize, intendedType, kDynamic_GrAccessPattern); |
| 309 | if (!buffer) { |
| 310 | return nullptr; |
robertphillips | 1b8e1b5 | 2015-06-24 06:54:10 -0700 | [diff] [blame] | 311 | } |
| 312 | } |
cdalton | d37fe76 | 2016-04-21 07:41:50 -0700 | [diff] [blame] | 313 | if (data) { |
| 314 | buffer->updateData(data, size); |
| 315 | } |
csmartdalton | 485a120 | 2016-07-13 10:16:32 -0700 | [diff] [blame] | 316 | SkASSERT(!buffer->isCPUBacked()); // We should only cache real VBOs. |
cdalton | d37fe76 | 2016-04-21 07:41:50 -0700 | [diff] [blame] | 317 | return buffer; |
jvanverth | 17aa047 | 2016-01-05 10:41:27 -0800 | [diff] [blame] | 318 | } |
| 319 | |
// Returns the stencil attachment for 'rt', first attaching a (possibly shared)
// one if the render target does not already have one. Returns whatever is
// attached afterwards, which may be nullptr if creation/attachment failed.
GrStencilAttachment* GrResourceProvider::attachStencilAttachment(GrRenderTarget* rt) {
    SkASSERT(rt);
    if (rt->renderTargetPriv().getStencilAttachment()) {
        return rt->renderTargetPriv().getStencilAttachment();
    }

    if (!rt->wasDestroyed() && rt->canAttemptStencilAttachment()) {
        GrUniqueKey sbKey;

        int width = rt->width();
        int height = rt->height();
#if 0
        if (this->caps()->oversizedStencilSupport()) {
            width = SkNextPow2(width);
            height = SkNextPow2(height);
        }
#endif
        bool newStencil = false;
        // Stencil attachments with matching dimensions and sample count are
        // shared across render targets via this unique key.
        GrStencilAttachment::ComputeSharedStencilAttachmentKey(width, height,
                                                               rt->numStencilSamples(), &sbKey);
        GrStencilAttachment* stencil = static_cast<GrStencilAttachment*>(
            this->findAndRefResourceByUniqueKey(sbKey));
        if (!stencil) {
            // Need to try and create a new stencil
            stencil = this->gpu()->createStencilAttachmentForRenderTarget(rt, width, height);
            if (stencil) {
                // Register the new attachment so later RTs of the same size share it.
                this->assignUniqueKeyToResource(sbKey, stencil);
                newStencil = true;
            }
        }
        if (rt->renderTargetPriv().attachStencilAttachment(stencil)) {
            if (newStencil) {
                // Right now we're clearing the stencil attachment here after it is
                // attached to a RT for the first time. When we start matching
                // stencil buffers with smaller color targets this will no longer
                // be correct because it won't be guaranteed to clear the entire
                // sb.
                // We used to clear down in the GL subclass using a special purpose
                // FBO. But iOS doesn't allow a stencil-only FBO. It reports unsupported
                // FBO status.
                this->gpu()->clearStencil(rt);
            }
        }
    }
    return rt->renderTargetPriv().getStencilAttachment();
}
| 366 | |
bungeman | 6bd5284 | 2016-10-27 09:30:08 -0700 | [diff] [blame] | 367 | sk_sp<GrRenderTarget> GrResourceProvider::wrapBackendTextureAsRenderTarget( |
| 368 | const GrBackendTextureDesc& desc) |
| 369 | { |
ericrk | f7b8b8a | 2016-02-24 14:49:51 -0800 | [diff] [blame] | 370 | if (this->isAbandoned()) { |
| 371 | return nullptr; |
| 372 | } |
kkinnunen | 49c4c22 | 2016-04-01 04:50:37 -0700 | [diff] [blame] | 373 | return this->gpu()->wrapBackendTextureAsRenderTarget(desc); |
ericrk | f7b8b8a | 2016-02-24 14:49:51 -0800 | [diff] [blame] | 374 | } |