/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Lazy.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrResourceKey.h"
#include "GrProxyProvider.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};

///////////////////////////////////////////////////////////////////////////////

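// Checks that the generator, optional subset, and optional color space override form a usable
// image. On failure fSharedGenerator is reset, so the Validator evaluates to false in the callers
// below; on success it carries the resulting SkImageInfo, origin, and unique ID for the image.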
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo   = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
      : fSharedGenerator(gen)
      , fAutoAcquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive               fAutoAcquire;
};
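// Usage sketch (illustrative): hold a ScopedGenerator for the duration of any generator call so
// the shared mutex stays locked, e.g.
//
//     ScopedGenerator generator(fSharedGenerator);
//     sk_sp<SkData> encoded = generator->refEncodedData();
//
// This is the pattern used by onRefEncoded() and onIsValid() below.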

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

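// On destruction, post any pending unique-key invalidation messages so GPU proxies cached under
// this image's key can be purged.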
SkImage_Lazy::~SkImage_Lazy() {
#if SK_SUPPORT_GPU
    for (int i = 0; i < fUniqueKeyInvalidatedMessages.count(); ++i) {
        SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(*fUniqueKeyInvalidatedMessages[i]);
    }
    fUniqueKeyInvalidatedMessages.deleteAll();
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////

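// Asks the generator for pixels covering |pmap|, whose top-left corner sits at (originX, originY)
// within the generator's full bounds. Generators only produce the whole image at its native size,
// so a subset request decodes into a temporary full-size bitmap and then copies the subset out.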
static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If the caller is requesting a subset, we need a temporary allocation for the full image,
    // and then we copy the subset into the caller's allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

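// Decodes this image into a raster bitmap. With kAllow_CachingHint the pixels are allocated
// through SkBitmapCache so later lookups are served from the cache; otherwise they go into a
// private, uncached allocation that is marked immutable.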
bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkImage::CachingHint chint) const {
    auto check_output_bitmap = [bitmap]() {
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getPixels());
        (void)bitmap;
    };

    auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, bitmap)) {
        check_output_bitmap();
        return true;
    }

    if (SkImage::kAllow_CachingHint == chint) {
        SkPixmap pmap;
        SkBitmapCache::RecPtr cacheRec = SkBitmapCache::Alloc(desc, fInfo, &pmap);
        if (!cacheRec ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), pmap,
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        this->notifyAddedToRasterCache();
    } else {
        if (!bitmap->tryAllocPixels(fInfo) ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), bitmap->pixmap(),
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        bitmap->setImmutable();
    }

    check_output_bitmap();
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkBitmap bm;
    if (this->getROPixels(&bm, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
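// GPU path: wrap this image in a GrImageTextureMaker and let it find or create a texture proxy
// suitable for the requested sampler state.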
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, scaleAdjust);
}
#endif

sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

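// Builds a color-space-converted view over the same generator. The last (target, result) pair is
// memoized under fOnMakeColorSpaceMutex, so repeated conversions to the same target return the
// cached image.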
sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target) const {
    SkAutoExclusive autoAcquire(fOnMakeColorSpaceMutex);
    if (fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

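// Public factory: wraps the generator in a SharedGenerator and validates it. Returns nullptr for
// a null generator, an empty image info, or a subset that falls outside the generator's bounds.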
sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU

void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey,
                                           GrUniqueKey* cacheKey) const {
    // TODO: Take dstColorSpace, include hash in key
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

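// Adapts an SkImageGenerator to the GrYUVProvider interface so the GPU upload path can query and
// fetch YUVA planes directly from the generator.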
class Generator_GrYUVProvider : public GrYUVProvider {
public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

private:
    uint32_t onGetID() const override { return fGen->uniqueID(); }
    bool onQueryYUVA8(SkYUVSizeInfo* sizeInfo,
                      SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                      SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUVA8(sizeInfo, yuvaIndices, colorSpace);
    }
    bool onGetYUVA8Planes(const SkYUVSizeInfo& sizeInfo,
                          const SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                          void* planes[]) override {
        return fGen->getYUVA8Planes(sizeInfo, yuvaIndices, planes);
    }

    SkImageGenerator* fGen;

    typedef GrYUVProvider INHERITED;
};

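// Associates |key| with |proxy| in the proxy provider. If a non-mipped |originalProxy| already
// owns the key (it is being replaced by a mipped copy), the key is first removed from the
// original so it can be reassigned.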
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require
            // them. Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

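// Fetches the YUVA plane layout and pixel data from the generator via a Generator_GrYUVProvider.
// Returns the SkCachedData that owns the plane memory, or nullptr if the generator cannot supply
// planes.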
sk_sp<SkCachedData> SkImage_Lazy::getPlanes(SkYUVSizeInfo* yuvaSizeInfo,
                                            SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                                            SkYUVColorSpace* yuvColorSpace,
                                            const void* planes[SkYUVSizeInfo::kMaxCount]) {
    ScopedGenerator generator(fSharedGenerator);
    Generator_GrYUVProvider provider(generator);

    sk_sp<SkCachedData> data = provider.getPlanes(yuvaSizeInfo, yuvaIndices, yuvColorSpace, planes);
    if (!data) {
        return nullptr;
    }

    return data;
}

/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(
        GrContext* ctx,
        const GrUniqueKey& origKey,
        SkImage::CachingHint chint,
        bool willBeMipped,
        GrTextureMaker::AllowedTexGenType genType) const {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // Even though some proxies created here may have a specific origin and use that origin, we do
    // not include that in the key. Since SkImages are meant to be immutable, a given SkImage will
    // always have an associated proxy that is always one origin or the other. It never can change
    // origins. Thus we don't need to include that info in the key itself.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
                SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, fInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                *fUniqueKeyInvalidatedMessages.append() =
                        new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(fInfo);
        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this,
        // apply a color space conversion from the generator's color space to this image's color
        // space.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        // TODO: Update to create the mipped surface in the YUV generator and draw the base
        //       layer directly into the mipped surface.
        proxy = provider.refAsTextureProxy(ctx, desc, generatorColorSpace, thisColorSpace);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->getROPixels(&bitmap, chint)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped,
        // generated a native non-mipped proxy, or generated a non-mipped YUV proxy. Thus we
        // generate a new mipped surface and copy the original proxy into the base layer. We will
        // then let the GPU generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        *fUniqueKeyInvalidatedMessages.append() =
                new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non-mipped proxy; see skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif