bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "GrResourceProvider.h" |
| 9 | |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 10 | #include "GrBuffer.h" |
robertphillips | 5fa7f30 | 2016-07-21 09:21:04 -0700 | [diff] [blame] | 11 | #include "GrCaps.h" |
Robert Phillips | 26c90e0 | 2017-03-14 14:39:29 -0400 | [diff] [blame^] | 12 | #include "GrContext.h" |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 13 | #include "GrGpu.h" |
kkinnunen | cabe20c | 2015-06-01 01:37:26 -0700 | [diff] [blame] | 14 | #include "GrPathRendering.h" |
egdaniel | ec00d94 | 2015-09-14 12:56:10 -0700 | [diff] [blame] | 15 | #include "GrRenderTarget.h" |
| 16 | #include "GrRenderTargetPriv.h" |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 17 | #include "GrResourceCache.h" |
| 18 | #include "GrResourceKey.h" |
Greg Daniel | d85f97d | 2017-03-07 13:37:21 -0500 | [diff] [blame] | 19 | #include "GrSemaphore.h" |
egdaniel | ec00d94 | 2015-09-14 12:56:10 -0700 | [diff] [blame] | 20 | #include "GrStencilAttachment.h" |
Robert Phillips | b66b42f | 2017-03-14 08:53:02 -0400 | [diff] [blame] | 21 | #include "GrSurfaceProxyPriv.h" |
Brian Osman | 32342f0 | 2017-03-04 08:12:46 -0500 | [diff] [blame] | 22 | #include "GrTexturePriv.h" |
| 23 | #include "../private/GrSingleOwner.h" |
halcanary | 4dbbd04 | 2016-06-07 17:21:10 -0700 | [diff] [blame] | 24 | #include "SkMathPriv.h" |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 25 | |
// Key under which the shared quad index buffer is stored in the resource cache.
GR_DECLARE_STATIC_UNIQUE_KEY(gQuadIndexBufferKey);

// Scratch texture requests below this size are rounded up (see the pow2 binning in
// refScratchTexture) to improve reuse of cached textures.
const int GrResourceProvider::kMinScratchTextureSize = 16;

// Debug-only check that every provider entry point runs on the single owning thread.
#define ASSERT_SINGLE_OWNER \
    SkDEBUGCODE(GrSingleOwner::AutoEnforce debug_SingleOwner(fSingleOwner);)
| 32 | |
// Constructs the provider over a gpu and its resource cache. Takes a ref to the gpu's
// caps and pre-computes the unique key used to cache the shared quad index buffer.
GrResourceProvider::GrResourceProvider(GrGpu* gpu, GrResourceCache* cache, GrSingleOwner* owner)
    : fCache(cache)
    , fGpu(gpu)
#ifdef SK_DEBUG
    , fSingleOwner(owner)
#endif
{
    fCaps = sk_ref_sp(fGpu->caps());

    GR_DEFINE_STATIC_UNIQUE_KEY(gQuadIndexBufferKey);
    fQuadIndexBufferKey = gQuadIndexBufferKey;
}
| 45 | |
Robert Phillips | b66b42f | 2017-03-14 08:53:02 -0400 | [diff] [blame] | 46 | bool GrResourceProvider::IsFunctionallyExact(GrTextureProxy* proxy) { |
| 47 | return proxy->priv().isExact() || (SkIsPow2(proxy->width()) && SkIsPow2(proxy->height())); |
| 48 | } |
Brian Osman | 32342f0 | 2017-03-04 08:12:46 -0500 | [diff] [blame] | 49 | |
// Creates a texture initialized with the given mip levels of data. Returns nullptr on any
// validation failure or if the GPU cannot create the texture. For uncompressed textures
// with at most one level of data, a cached scratch texture is reused when possible.
GrTexture* GrResourceProvider::createMipMappedTexture(const GrSurfaceDesc& desc,
                                                      SkBudgeted budgeted, const GrMipLevel* texels,
                                                      int mipLevelCount, uint32_t flags) {
    ASSERT_SINGLE_OWNER

    if (this->isAbandoned()) {
        return nullptr;
    }
    // A non-zero level count requires a texel array.
    if (mipLevelCount && !texels) {
        return nullptr;
    }
    // Every provided level must carry pixel data.
    for (int i = 0; i < mipLevelCount; ++i) {
        if (!texels[i].fPixels) {
            return nullptr;
        }
    }
    // Signed-integer configs are not supported with more than one mip level here.
    if (mipLevelCount > 1 && GrPixelConfigIsSint(desc.fConfig)) {
        return nullptr;
    }
    // Render-target textures must use a config the caps report as renderable.
    if ((desc.fFlags & kRenderTarget_GrSurfaceFlag) &&
        !fGpu->caps()->isConfigRenderable(desc.fConfig, desc.fSampleCnt > 0)) {
        return nullptr;
    }
    // Scratch-reuse fast path: only for uncompressed textures with <= 1 level of data.
    if (!GrPixelConfigIsCompressed(desc.fConfig)) {
        if (mipLevelCount < 2) {
            // Require exact dimensions and do not allocate a new texture on a cache miss.
            flags |= kExact_Flag | kNoCreate_Flag;
            if (GrTexture* texture = this->refScratchTexture(desc, flags)) {
                // Upload the base level if one was supplied. On upload failure, drop the
                // scratch texture and fall through to a fresh allocation below.
                if (!mipLevelCount ||
                    texture->writePixels(0, 0, desc.fWidth, desc.fHeight, desc.fConfig,
                                         texels[0].fPixels, texels[0].fRowBytes)) {
                    // Scratch textures come back budgeted; honor an unbudgeted request.
                    if (SkBudgeted::kNo == budgeted) {
                        texture->resourcePriv().makeUnbudgeted();
                    }
                    return texture;
                }
                texture->unref();
            }
        }
    }

    // Slow path: hand all levels to the GPU to create a brand new texture.
    SkTArray<GrMipLevel> texelsShallowCopy(mipLevelCount);
    for (int i = 0; i < mipLevelCount; ++i) {
        texelsShallowCopy.push_back(texels[i]);
    }
    return fGpu->createTexture(desc, budgeted, texelsShallowCopy);
}
| 96 | |
| 97 | GrTexture* GrResourceProvider::createTexture(const GrSurfaceDesc& desc, SkBudgeted budgeted, |
| 98 | const void* srcData, size_t rowBytes, uint32_t flags) { |
| 99 | GrMipLevel tempTexels; |
| 100 | GrMipLevel* texels = nullptr; |
| 101 | int levelCount = 0; |
| 102 | if (srcData) { |
| 103 | tempTexels.fPixels = srcData; |
| 104 | tempTexels.fRowBytes = rowBytes; |
| 105 | texels = &tempTexels; |
| 106 | levelCount = 1; |
| 107 | } |
| 108 | return this->createMipMappedTexture(desc, budgeted, texels, levelCount, flags); |
| 109 | } |
| 110 | |
| 111 | GrTexture* GrResourceProvider::createApproxTexture(const GrSurfaceDesc& desc, uint32_t flags) { |
| 112 | ASSERT_SINGLE_OWNER |
| 113 | SkASSERT(0 == flags || kNoPendingIO_Flag == flags); |
| 114 | return this->internalCreateApproxTexture(desc, flags); |
| 115 | } |
| 116 | |
| 117 | GrTexture* GrResourceProvider::internalCreateApproxTexture(const GrSurfaceDesc& desc, |
| 118 | uint32_t scratchFlags) { |
| 119 | ASSERT_SINGLE_OWNER |
| 120 | if (this->isAbandoned()) { |
| 121 | return nullptr; |
| 122 | } |
| 123 | // Currently we don't recycle compressed textures as scratch. |
| 124 | if (GrPixelConfigIsCompressed(desc.fConfig)) { |
| 125 | return nullptr; |
| 126 | } else { |
| 127 | return this->refScratchTexture(desc, scratchFlags); |
| 128 | } |
| 129 | } |
| 130 | |
// Finds or creates a texture usable as scratch storage for inDesc. Unless kExact_Flag is
// set, dimensions are rounded up to pow2 (with a minimum) to improve cache hit rates.
// Returns nullptr if kNoCreate_Flag is set and no cached match exists, or if creation
// fails. The caller receives a ref on the returned texture.
GrTexture* GrResourceProvider::refScratchTexture(const GrSurfaceDesc& inDesc,
                                                 uint32_t flags) {
    ASSERT_SINGLE_OWNER
    SkASSERT(!this->isAbandoned());
    SkASSERT(!GrPixelConfigIsCompressed(inDesc.fConfig));

    // Copy-on-first-write: the desc is only cloned if we actually rebin the dimensions.
    SkTCopyOnFirstWrite<GrSurfaceDesc> desc(inDesc);

    // Only consult the cache when scratch reuse is enabled (render targets always are).
    if (fGpu->caps()->reuseScratchTextures() || (desc->fFlags & kRenderTarget_GrSurfaceFlag)) {
        if (!(kExact_Flag & flags)) {
            // bin by pow2 with a reasonable min
            GrSurfaceDesc* wdesc = desc.writable();
            wdesc->fWidth = SkTMax(kMinScratchTextureSize, GrNextPow2(desc->fWidth));
            wdesc->fHeight = SkTMax(kMinScratchTextureSize, GrNextPow2(desc->fHeight));
        }

        GrScratchKey key;
        GrTexturePriv::ComputeScratchKey(*desc, &key);
        uint32_t scratchFlags = 0;
        if (kNoPendingIO_Flag & flags) {
            scratchFlags = GrResourceCache::kRequireNoPendingIO_ScratchFlag;
        } else if (!(desc->fFlags & kRenderTarget_GrSurfaceFlag)) {
            // If it is not a render target then it will most likely be populated by
            // writePixels() which will trigger a flush if the texture has pending IO.
            scratchFlags = GrResourceCache::kPreferNoPendingIO_ScratchFlag;
        }
        GrGpuResource* resource = fCache->findAndRefScratchResource(key,
                                                                   GrSurface::WorstCaseSize(*desc),
                                                                   scratchFlags);
        if (resource) {
            GrSurface* surface = static_cast<GrSurface*>(resource);
            GrRenderTarget* rt = surface->asRenderTarget();
            // A recycled render target's old contents are stale; discard when supported.
            if (rt && fGpu->caps()->discardRenderTargetSupport()) {
                rt->discard();
            }
            return surface->asTexture();
        }
    }

    // Cache miss (or reuse disabled): create a new budgeted texture unless forbidden.
    if (!(kNoCreate_Flag & flags)) {
        return fGpu->createTexture(*desc, SkBudgeted::kYes);
    }

    return nullptr;
}
| 176 | |
| 177 | sk_sp<GrTexture> GrResourceProvider::wrapBackendTexture(const GrBackendTextureDesc& desc, |
| 178 | GrWrapOwnership ownership) { |
| 179 | ASSERT_SINGLE_OWNER |
| 180 | if (this->isAbandoned()) { |
| 181 | return nullptr; |
| 182 | } |
| 183 | return fGpu->wrapBackendTexture(desc, ownership); |
| 184 | } |
| 185 | |
| 186 | sk_sp<GrRenderTarget> GrResourceProvider::wrapBackendRenderTarget( |
| 187 | const GrBackendRenderTargetDesc& desc) |
| 188 | { |
| 189 | ASSERT_SINGLE_OWNER |
Brian Osman | 0b791f5 | 2017-03-10 08:30:22 -0500 | [diff] [blame] | 190 | return this->isAbandoned() ? nullptr : fGpu->wrapBackendRenderTarget(desc); |
Brian Osman | 32342f0 | 2017-03-04 08:12:46 -0500 | [diff] [blame] | 191 | } |
| 192 | |
| 193 | void GrResourceProvider::assignUniqueKeyToResource(const GrUniqueKey& key, |
| 194 | GrGpuResource* resource) { |
| 195 | ASSERT_SINGLE_OWNER |
| 196 | if (this->isAbandoned() || !resource) { |
| 197 | return; |
| 198 | } |
| 199 | resource->resourcePriv().setUniqueKey(key); |
| 200 | } |
| 201 | |
| 202 | GrGpuResource* GrResourceProvider::findAndRefResourceByUniqueKey(const GrUniqueKey& key) { |
| 203 | ASSERT_SINGLE_OWNER |
| 204 | return this->isAbandoned() ? nullptr : fCache->findAndRefUniqueResource(key); |
| 205 | } |
| 206 | |
| 207 | GrTexture* GrResourceProvider::findAndRefTextureByUniqueKey(const GrUniqueKey& key) { |
| 208 | ASSERT_SINGLE_OWNER |
| 209 | GrGpuResource* resource = this->findAndRefResourceByUniqueKey(key); |
| 210 | if (resource) { |
| 211 | GrTexture* texture = static_cast<GrSurface*>(resource)->asTexture(); |
| 212 | SkASSERT(texture); |
| 213 | return texture; |
| 214 | } |
| 215 | return NULL; |
| 216 | } |
| 217 | |
Robert Phillips | d374948 | 2017-03-14 09:17:43 -0400 | [diff] [blame] | 218 | // MDB TODO (caching): this side-steps the issue of texture proxies with unique IDs |
| 219 | void GrResourceProvider::assignUniqueKeyToProxy(const GrUniqueKey& key, GrTextureProxy* proxy) { |
| 220 | ASSERT_SINGLE_OWNER |
| 221 | SkASSERT(key.isValid()); |
| 222 | if (this->isAbandoned() || !proxy) { |
| 223 | return; |
| 224 | } |
| 225 | |
| 226 | GrTexture* texture = proxy->instantiate(this); |
| 227 | if (!texture) { |
| 228 | return; |
| 229 | } |
| 230 | |
| 231 | this->assignUniqueKeyToResource(key, texture); |
| 232 | } |
| 233 | |
| 234 | // MDB TODO (caching): this side-steps the issue of texture proxies with unique IDs |
| 235 | sk_sp<GrTextureProxy> GrResourceProvider::findProxyByUniqueKey(const GrUniqueKey& key) { |
| 236 | ASSERT_SINGLE_OWNER |
| 237 | |
| 238 | sk_sp<GrTexture> texture(this->findAndRefTextureByUniqueKey(key)); |
| 239 | if (!texture) { |
| 240 | return nullptr; |
| 241 | } |
| 242 | |
| 243 | return GrSurfaceProxy::MakeWrapped(std::move(texture)); |
| 244 | } |
| 245 | |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 246 | const GrBuffer* GrResourceProvider::createInstancedIndexBuffer(const uint16_t* pattern, |
| 247 | int patternSize, |
| 248 | int reps, |
| 249 | int vertCount, |
| 250 | const GrUniqueKey& key) { |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 251 | size_t bufferSize = patternSize * reps * sizeof(uint16_t); |
| 252 | |
Brian Salomon | 09d994e | 2016-12-21 11:14:46 -0500 | [diff] [blame] | 253 | // This is typically used in GrMeshDrawOps, so we assume kNoPendingIO. |
cdalton | e2e71c2 | 2016-04-07 18:13:29 -0700 | [diff] [blame] | 254 | GrBuffer* buffer = this->createBuffer(bufferSize, kIndex_GrBufferType, kStatic_GrAccessPattern, |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 255 | kNoPendingIO_Flag); |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 256 | if (!buffer) { |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 257 | return nullptr; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 258 | } |
| 259 | uint16_t* data = (uint16_t*) buffer->map(); |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 260 | bool useTempData = (nullptr == data); |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 261 | if (useTempData) { |
halcanary | 385fe4d | 2015-08-26 13:07:48 -0700 | [diff] [blame] | 262 | data = new uint16_t[reps * patternSize]; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 263 | } |
| 264 | for (int i = 0; i < reps; ++i) { |
| 265 | int baseIdx = i * patternSize; |
| 266 | uint16_t baseVert = (uint16_t)(i * vertCount); |
| 267 | for (int j = 0; j < patternSize; ++j) { |
| 268 | data[baseIdx+j] = baseVert + pattern[j]; |
| 269 | } |
| 270 | } |
| 271 | if (useTempData) { |
| 272 | if (!buffer->updateData(data, bufferSize)) { |
| 273 | buffer->unref(); |
halcanary | 96fcdcc | 2015-08-27 07:41:13 -0700 | [diff] [blame] | 274 | return nullptr; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 275 | } |
halcanary | 385fe4d | 2015-08-26 13:07:48 -0700 | [diff] [blame] | 276 | delete[] data; |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 277 | } else { |
| 278 | buffer->unmap(); |
| 279 | } |
| 280 | this->assignUniqueKeyToResource(key, buffer); |
| 281 | return buffer; |
| 282 | } |
| 283 | |
cdalton | 397536c | 2016-03-25 12:15:03 -0700 | [diff] [blame] | 284 | const GrBuffer* GrResourceProvider::createQuadIndexBuffer() { |
bsalomon | ed0bcad | 2015-05-04 10:36:42 -0700 | [diff] [blame] | 285 | static const int kMaxQuads = 1 << 12; // max possible: (1 << 14) - 1; |
| 286 | GR_STATIC_ASSERT(4 * kMaxQuads <= 65535); |
| 287 | static const uint16_t kPattern[] = { 0, 1, 2, 0, 2, 3 }; |
| 288 | |
| 289 | return this->createInstancedIndexBuffer(kPattern, 6, kMaxQuads, 4, fQuadIndexBufferKey); |
| 290 | } |
| 291 | |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 292 | GrPath* GrResourceProvider::createPath(const SkPath& path, const GrStyle& style) { |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 293 | SkASSERT(this->gpu()->pathRendering()); |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 294 | return this->gpu()->pathRendering()->createPath(path, style); |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 295 | } |
| 296 | |
| 297 | GrPathRange* GrResourceProvider::createPathRange(GrPathRange::PathGenerator* gen, |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 298 | const GrStyle& style) { |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 299 | SkASSERT(this->gpu()->pathRendering()); |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 300 | return this->gpu()->pathRendering()->createPathRange(gen, style); |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 301 | } |
| 302 | |
reed | a9322c2 | 2016-04-12 06:47:05 -0700 | [diff] [blame] | 303 | GrPathRange* GrResourceProvider::createGlyphs(const SkTypeface* tf, |
| 304 | const SkScalerContextEffects& effects, |
| 305 | const SkDescriptor* desc, |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 306 | const GrStyle& style) { |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 307 | |
| 308 | SkASSERT(this->gpu()->pathRendering()); |
bsalomon | 6663acf | 2016-05-10 09:14:17 -0700 | [diff] [blame] | 309 | return this->gpu()->pathRendering()->createGlyphs(tf, effects, desc, style); |
bsalomon | 706f08f | 2015-05-22 07:35:58 -0700 | [diff] [blame] | 310 | } |
| 311 | |
// Returns a buffer of at least `size` bytes, optionally initialized with `data`.
// Non-dynamic buffers are created directly on the GPU. Dynamic buffers are satisfied
// from the scratch-resource cache when possible, binning the allocation size up to a
// power of two (with a minimum) so cached buffers are reusable.
GrBuffer* GrResourceProvider::createBuffer(size_t size, GrBufferType intendedType,
                                           GrAccessPattern accessPattern, uint32_t flags,
                                           const void* data) {
    if (this->isAbandoned()) {
        return nullptr;
    }
    if (kDynamic_GrAccessPattern != accessPattern) {
        return this->gpu()->createBuffer(size, intendedType, accessPattern, data);
    }
    // Prefer a CPU-backed buffer when the caps prefer client-side dynamic buffers.
    // NOTE(review): the kDynamic check in this condition is always true here, since
    // non-dynamic patterns already returned above.
    if (!(flags & kRequireGpuMemory_Flag) &&
        this->gpu()->caps()->preferClientSideDynamicBuffers() &&
        GrBufferTypeIsVertexOrIndex(intendedType) &&
        kDynamic_GrAccessPattern == accessPattern) {
        return GrBuffer::CreateCPUBacked(this->gpu(), size, intendedType, data);
    }

    // bin by pow2 with a reasonable min
    static const size_t MIN_SIZE = 1 << 12;
    size_t allocSize = SkTMax(MIN_SIZE, GrNextSizePow2(size));

    GrScratchKey key;
    GrBuffer::ComputeScratchKeyForDynamicVBO(allocSize, intendedType, &key);
    uint32_t scratchFlags = 0;
    if (flags & kNoPendingIO_Flag) {
        scratchFlags = GrResourceCache::kRequireNoPendingIO_ScratchFlag;
    } else {
        scratchFlags = GrResourceCache::kPreferNoPendingIO_ScratchFlag;
    }
    GrBuffer* buffer = static_cast<GrBuffer*>(
        this->cache()->findAndRefScratchResource(key, allocSize, scratchFlags));
    if (!buffer) {
        buffer = this->gpu()->createBuffer(allocSize, intendedType, kDynamic_GrAccessPattern);
        if (!buffer) {
            return nullptr;
        }
    }
    // Only the caller-requested `size` bytes are initialized, not the binned allocation.
    if (data) {
        buffer->updateData(data, size);
    }
    SkASSERT(!buffer->isCPUBacked()); // We should only cache real VBOs.
    return buffer;
}
| 354 | |
// Returns the render target's stencil attachment, creating and attaching one (shared by
// key with other RTs of matching dimensions/sample count) if it has none. Returns
// whatever attachment the RT ends up with, which may be nullptr if attachment fails.
GrStencilAttachment* GrResourceProvider::attachStencilAttachment(GrRenderTarget* rt) {
    SkASSERT(rt);
    if (rt->renderTargetPriv().getStencilAttachment()) {
        return rt->renderTargetPriv().getStencilAttachment();
    }

    if (!rt->wasDestroyed() && rt->canAttemptStencilAttachment()) {
        GrUniqueKey sbKey;

        int width = rt->width();
        int height = rt->height();
#if 0
        if (this->caps()->oversizedStencilSupport()) {
            width = SkNextPow2(width);
            height = SkNextPow2(height);
        }
#endif
        bool newStencil = false;
        // Stencil buffers are shared between RTs via a key built from size/sample count.
        GrStencilAttachment::ComputeSharedStencilAttachmentKey(width, height,
                                                               rt->numStencilSamples(), &sbKey);
        GrStencilAttachment* stencil = static_cast<GrStencilAttachment*>(
            this->findAndRefResourceByUniqueKey(sbKey));
        if (!stencil) {
            // Need to try and create a new stencil
            stencil = this->gpu()->createStencilAttachmentForRenderTarget(rt, width, height);
            if (stencil) {
                this->assignUniqueKeyToResource(sbKey, stencil);
                newStencil = true;
            }
        }
        if (rt->renderTargetPriv().attachStencilAttachment(stencil)) {
            if (newStencil) {
                // Right now we're clearing the stencil attachment here after it is
                // attached to a RT for the first time. When we start matching
                // stencil buffers with smaller color targets this will no longer
                // be correct because it won't be guaranteed to clear the entire
                // sb.
                // We used to clear down in the GL subclass using a special purpose
                // FBO. But iOS doesn't allow a stencil-only FBO. It reports unsupported
                // FBO status.
                this->gpu()->clearStencil(rt);
            }
        }
    }
    return rt->renderTargetPriv().getStencilAttachment();
}
| 401 | |
bungeman | 6bd5284 | 2016-10-27 09:30:08 -0700 | [diff] [blame] | 402 | sk_sp<GrRenderTarget> GrResourceProvider::wrapBackendTextureAsRenderTarget( |
| 403 | const GrBackendTextureDesc& desc) |
| 404 | { |
ericrk | f7b8b8a | 2016-02-24 14:49:51 -0800 | [diff] [blame] | 405 | if (this->isAbandoned()) { |
| 406 | return nullptr; |
| 407 | } |
kkinnunen | 49c4c22 | 2016-04-01 04:50:37 -0700 | [diff] [blame] | 408 | return this->gpu()->wrapBackendTextureAsRenderTarget(desc); |
ericrk | f7b8b8a | 2016-02-24 14:49:51 -0800 | [diff] [blame] | 409 | } |
Greg Daniel | d85f97d | 2017-03-07 13:37:21 -0500 | [diff] [blame] | 410 | |
// Creates a new semaphore via the GPU backend. Callers must not discard the result.
sk_sp<GrSemaphore> SK_WARN_UNUSED_RESULT GrResourceProvider::makeSemaphore() {
    return fGpu->makeSemaphore();
}
| 414 | |
// Re-associates the semaphore with this provider's GrGpu (see releaseOwnershipOfSemaphore
// for the inverse).
void GrResourceProvider::takeOwnershipOfSemaphore(sk_sp<GrSemaphore> semaphore) {
    semaphore->resetGpu(fGpu);
}
| 418 | |
// Detaches the semaphore from any GrGpu by clearing its gpu pointer.
void GrResourceProvider::releaseOwnershipOfSemaphore(sk_sp<GrSemaphore> semaphore) {
    semaphore->resetGpu(nullptr);
}
| 422 | |
| 423 | |