/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Lazy.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkCachedData.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrResourceKey.h"
#include "GrProxyProvider.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};

///////////////////////////////////////////////////////////////////////////////

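// Validates the (generator, subset, color space) combination up front: it resolves the effective
// dimensions and origin and picks a unique ID (reusing the generator's ID unless the image is a
// strict subset or a color space override is applied). On failure, fSharedGenerator is reset so
// the Validator evaluates to false at the call sites below.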
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo   = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
      : fSharedGenerator(gen)
      , fAutoAquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive               fAutoAquire;
};

///////////////////////////////////////////////////////////////////////////////

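// The Validator has already resolved the image info, origin, and unique ID, so the constructor
// simply adopts those fields and takes ownership of the shared generator.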
SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

SkImage_Lazy::~SkImage_Lazy() {
#if SK_SUPPORT_GPU
    for (int i = 0; i < fUniqueKeyInvalidatedMessages.count(); ++i) {
        SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(*fUniqueKeyInvalidatedMessages[i]);
    }
    fUniqueKeyInvalidatedMessages.deleteAll();
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////

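// Asks the generator to decode into pmap. The generator always produces its full image, so when
// the caller wants a subset we decode into a temporary full-size bitmap and copy the subset out.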
static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we need a temporary allocation for the full image, and
    // then copy the subset into their allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

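// Returns decoded, immutable pixels for this (possibly subset) image. With kAllow_CachingHint the
// result is stored in and served from the shared SkBitmapCache; otherwise it is decoded into a
// private allocation owned by the caller's bitmap.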
bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkImage::CachingHint chint) const {
    auto check_output_bitmap = [bitmap]() {
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getPixels());
        (void)bitmap;
    };

    auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, bitmap)) {
        check_output_bitmap();
        return true;
    }

    if (SkImage::kAllow_CachingHint == chint) {
        SkPixmap pmap;
        SkBitmapCache::RecPtr cacheRec = SkBitmapCache::Alloc(desc, fInfo, &pmap);
        if (!cacheRec ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), pmap,
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        this->notifyAddedToRasterCache();
    } else {
        if (!bitmap->tryAllocPixels(fInfo) ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), bitmap->pixmap(),
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        bitmap->setImmutable();
    }

    check_output_bitmap();
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkBitmap bm;
    if (this->getROPixels(&bm, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
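// GPU path: delegate to GrImageTextureMaker, which handles sampler/mip constraints and, as far as
// this file is concerned, ultimately calls back into lockTextureProxy() defined further below.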
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, scaleAdjust);
}
#endif

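// Creates a new lazy image that shares this generator but is restricted to `subset`, translated
// into the generator's coordinate space via this image's origin.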
sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

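// Lazily re-targets this image to a new color space. A one-entry cache, guarded by
// fOnMakeColorSpaceMutex, returns the previous result when the same target is requested again.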
sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target) const {
    SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
    if (fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

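// Public factory. Example usage (a sketch; `encoded` is assumed to hold encoded image bytes):
//
//     sk_sp<SkImage> image = SkImage::MakeFromGenerator(
//             SkImageGenerator::MakeFromEncoded(std::move(encoded)));
//
// Nothing is decoded here; pixels are produced on demand via getROPixels() / lockTextureProxy().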
sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU

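// Wraps the maker-supplied key in an image-specific domain so texture proxies cached here do not
// collide with other users of the original key. An invalid origKey produces an invalid cacheKey,
// which disables proxy caching in lockTextureProxy().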
void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey,
                                           GrUniqueKey* cacheKey) const {
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

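// Adapts an SkImageGenerator to the GrYUVProvider interface so the GPU backend can build a texture
// directly from the generator's YUVA planes.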
class Generator_GrYUVProvider : public GrYUVProvider {
public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

private:
    uint32_t onGetID() const override { return fGen->uniqueID(); }
    bool onQueryYUVA8(SkYUVASizeInfo* sizeInfo,
                      SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                      SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUVA8(sizeInfo, yuvaIndices, colorSpace);
    }
    bool onGetYUVA8Planes(const SkYUVASizeInfo& sizeInfo,
                          const SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                          void* planes[]) override {
        return fGen->getYUVA8Planes(sizeInfo, yuvaIndices, planes);
    }

    SkImageGenerator* fGen;

    typedef GrYUVProvider INHERITED;
};

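// Assigns the unique key to a newly created proxy, first removing it from an older, non-mipped
// proxy if that proxy already owns it (see the mip-map fallback at the end of lockTextureProxy()).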
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            SkASSERT(originalProxy->getUniqueKey() == key);
            proxyProvider->removeUniqueKeyFromProxy(originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

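// Exposes the generator's YUVA plane data via Generator_GrYUVProvider. The returned SkCachedData
// owns the plane memory, so callers must keep it alive for as long as the `planes` pointers are
// in use.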
sk_sp<SkCachedData> SkImage_Lazy::getPlanes(SkYUVASizeInfo* yuvaSizeInfo,
                                            SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                                            SkYUVColorSpace* yuvColorSpace,
                                            const void* planes[SkYUVASizeInfo::kMaxCount]) {
    ScopedGenerator generator(fSharedGenerator);
    Generator_GrYUVProvider provider(generator);

    sk_sp<SkCachedData> data = provider.getPlanes(yuvaSizeInfo, yuvaIndices, yuvColorSpace, planes);
    if (!data) {
        return nullptr;
    }

    return data;
}


/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(
        GrContext* ctx,
        const GrUniqueKey& origKey,
        SkImage::CachingHint chint,
        bool willBeMipped,
        GrTextureMaker::AllowedTexGenType genType) const {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // Even though some proxies created here may have a specific origin and use that origin, we do
    // not include that in the key. Since SkImages are meant to be immutable, a given SkImage will
    // always have an associated proxy with a single, fixed origin; it can never change origins.
    // Thus we don't need to include that info in the key itself.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, fInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                *fUniqueKeyInvalidatedMessages.append() =
                        new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(fInfo);

        SkColorType colorType = fInfo.colorType();
        GrBackendFormat format =
                ctx->contextPriv().caps()->getBackendFormatFromColorType(colorType);

        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this, apply
        // a color space conversion from the generator's color space to this image's color space.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        // TODO: Update to create the mipped surface in the YUV generator and draw the base
        //       layer directly into the mipped surface.
        proxy = provider.refAsTextureProxy(ctx, format, desc, generatorColorSpace, thisColorSpace);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->getROPixels(&bitmap, chint)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped, generated
        // a native non mipped proxy, or generated a non-mipped yuv proxy. Thus we generate a new
        // mipped surface and copy the original proxy into the base layer. We will then let the gpu
        // generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        *fUniqueKeyInvalidatedMessages.append() =
                new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non mipped proxy; See skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif