/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Lazy.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkCachedData.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrResourceKey.h"
#include "GrProxyProvider.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};

///////////////////////////////////////////////////////////////////////////////

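// Validates a (generator, subset, color space) combination: computes the resulting image info,
// subset origin, and unique ID, and resets fSharedGenerator if the inputs are unusable (null
// generator, empty generator info, or a subset that falls outside the generator's bounds).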
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
            : fSharedGenerator(gen)
            , fAutoAquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive               fAutoAquire;
};

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

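// On destruction, notify the GPU caches that the unique keys recorded for this image's textures
// are no longer valid, so the corresponding cached textures can be purged.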
SkImage_Lazy::~SkImage_Lazy() {
#if SK_SUPPORT_GPU
    for (int i = 0; i < fUniqueKeyInvalidatedMessages.count(); ++i) {
        SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(*fUniqueKeyInvalidatedMessages[i]);
    }
    fUniqueKeyInvalidatedMessages.deleteAll();
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////

static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to have a temp allocation for the full image, and
    // then copy the subset into their allocation.
    SkBitmap  full;
    SkPixmap  fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

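// Produces an immutable raster copy of the lazily generated pixels. With kAllow_CachingHint the
// result is stored in (and later served from) SkBitmapCache; otherwise the pixels are decoded into
// a freshly allocated bitmap each time.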
bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkImage::CachingHint chint) const {
    auto check_output_bitmap = [bitmap]() {
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getPixels());
        (void)bitmap;
    };

    auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, bitmap)) {
        check_output_bitmap();
        return true;
    }

    if (SkImage::kAllow_CachingHint == chint) {
        SkPixmap pmap;
        SkBitmapCache::RecPtr cacheRec = SkBitmapCache::Alloc(desc, fInfo, &pmap);
        if (!cacheRec ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), pmap,
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        this->notifyAddedToRasterCache();
    } else {
        if (!bitmap->tryAllocPixels(fInfo) ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), bitmap->pixmap(),
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        bitmap->setImmutable();
    }

    check_output_bitmap();
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkBitmap bm;
    if (this->getROPixels(&bm, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
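// GPU accessor: hands this image to a GrImageTextureMaker, which returns (or creates) a texture
// proxy compatible with the requested sampler state.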
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, scaleAdjust);
}
#endif

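// Creates another lazy image backed by the same generator but restricted to 'subset'. The subset
// is offset by fOrigin so that it is expressed in the generator's coordinate space.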
sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

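// Color space conversion is memoized: the most recently requested target and its result are kept
// (guarded by fOnMakeColorSpaceMutex), so converting to the same color space twice returns the
// cached image instead of building a new one.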
sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target) const {
    SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
    if (fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

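// Typical usage (a sketch, not from this file; assumes a concrete SkImageGenerator subclass named
// MyGenerator and an SkCanvas* named canvas):
//
//     std::unique_ptr<SkImageGenerator> gen(new MyGenerator(/* ... */));
//     sk_sp<SkImage> image = SkImage::MakeFromGenerator(std::move(gen), nullptr);  // no subset
//     if (image) {
//         canvas->drawImage(image, 0, 0);  // pixels are decoded lazily, on first use
//     }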
sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU

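// Copies 'origKey' into 'cacheKey' under a dedicated "Image" domain. If 'origKey' is invalid,
// 'cacheKey' is left invalid as well, which disables the caching paths in lockTextureProxy below.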
void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey,
                                           GrUniqueKey* cacheKey) const {
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

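// Adapts an SkImageGenerator to the GrYUVProvider interface so the GPU can build a texture
// directly from the generator's YUVA planes (path 3 of lockTextureProxy below).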
class Generator_GrYUVProvider : public GrYUVProvider {
public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

private:
    uint32_t onGetID() const override { return fGen->uniqueID(); }
    bool onQueryYUVA8(SkYUVASizeInfo* sizeInfo,
                      SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                      SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUVA8(sizeInfo, yuvaIndices, colorSpace);
    }
    bool onGetYUVA8Planes(const SkYUVASizeInfo& sizeInfo,
                          const SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                          void* planes[]) override {
        return fGen->getYUVA8Planes(sizeInfo, yuvaIndices, planes);
    }

    SkImageGenerator* fGen;

    typedef GrYUVProvider INHERITED;
};

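// Attaches 'key' (if valid) to 'proxy' in the proxy provider. When 'originalProxy' already holds
// the key (a cached, non-mipped proxy that is being replaced by a mipped one), the key is first
// removed from the original so the new proxy can own it.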
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

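// Returns the generator's YUVA plane data by funneling the request through a
// Generator_GrYUVProvider; returns nullptr if the generator cannot supply planes.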
sk_sp<SkCachedData> SkImage_Lazy::getPlanes(SkYUVASizeInfo* yuvaSizeInfo,
                                            SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                                            SkYUVColorSpace* yuvColorSpace,
                                            const void* planes[SkYUVASizeInfo::kMaxCount]) {
    ScopedGenerator generator(fSharedGenerator);
    Generator_GrYUVProvider provider(generator);

    sk_sp<SkCachedData> data = provider.getPlanes(yuvaSizeInfo, yuvaIndices, yuvColorSpace, planes);
    if (!data) {
        return nullptr;
    }

    return data;
}


/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(
        GrContext* ctx,
        const GrUniqueKey& origKey,
        SkImage::CachingHint chint,
        bool willBeMipped,
        GrTextureMaker::AllowedTexGenType genType) const {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // Even though some proxies created here may have a specific origin and use that origin, we do
    // not include that in the key. Since SkImages are meant to be immutable, a given SkImage will
    // always have an associated proxy that is always one origin or the other. It can never change
    // origins. Thus we don't need to include that info in the key itself.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, fInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                *fUniqueKeyInvalidatedMessages.append() =
                        new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(fInfo);

        SkColorType colorType = fInfo.colorType();
        GrBackendFormat format =
                ctx->contextPriv().caps()->getBackendFormatFromColorType(colorType);

        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this,
        // apply a color space conversion from the generator's color space to this image's
        // color space.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        // TODO: Update to create the mipped surface in the YUV generator and draw the base
        //       layer directly into the mipped surface.
        proxy = provider.refAsTextureProxy(ctx, format, desc, generatorColorSpace, thisColorSpace);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->getROPixels(&bitmap, chint)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped,
        // generated a native non-mipped proxy, or generated a non-mipped YUV proxy. Thus we
        // generate a new mipped surface and copy the original proxy into the base layer. We will
        // then let the GPU generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        *fUniqueKeyInvalidatedMessages.append() =
                new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non-mipped proxy; see skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif