blob: 402acf5d73039e1bb6d3799484dff0b53d1686ec [file] [log] [blame]
reed85d91782015-09-10 14:33:38 -07001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "SkImage_Base.h"
reed85d91782015-09-10 14:33:38 -07009#include "SkImageCacherator.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040010
11#include "SkBitmap.h"
12#include "SkBitmapCache.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040013#include "SkData.h"
14#include "SkImageGenerator.h"
reed85d91782015-09-10 14:33:38 -070015#include "SkImagePriv.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040016#include "SkNextID.h"
reed85d91782015-09-10 14:33:38 -070017#include "SkPixelRef.h"
reed85d91782015-09-10 14:33:38 -070018
Brian Osmandf7e0752017-04-26 16:20:28 -040019#if SK_SUPPORT_GPU
20#include "GrContext.h"
21#include "GrContextPriv.h"
22#include "GrGpuResourcePriv.h"
23#include "GrImageTextureMaker.h"
24#include "GrResourceKey.h"
Robert Phillips1afd4cd2018-01-08 13:40:32 -050025#include "GrProxyProvider.h"
Brian Salomon2bbdcc42017-09-07 12:36:34 -040026#include "GrSamplerState.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040027#include "GrYUVProvider.h"
28#include "SkGr.h"
29#endif
reed85d91782015-09-10 14:33:38 -070030
// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    // Wraps the generator in a ref-counted holder; returns nullptr if gen is null.
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe.  It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
        : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    // ScopedGenerator and SkImage_Lazy access fGenerator/fMutex directly.
    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex fMutex;  // serializes all non-const access to fGenerator
};
53
// An SkImage backed by an SkImageGenerator: pixels are produced on demand and
// cached (as bitmaps and/or GPU textures) rather than decoded eagerly.
class SkImage_Lazy : public SkImage_Base, public SkImageCacherator {
public:
    // Validates the (generator, subset, colorSpace) triple and computes the
    // resulting info/origin/uniqueID. Converts to false if the inputs are unusable.
    struct Validator {
        Validator(sk_sp<SharedGenerator>, const SkIRect* subset, sk_sp<SkColorSpace> colorSpace);

        // True iff validation succeeded (the generator survived the checks).
        operator bool() const { return fSharedGenerator.get(); }

        sk_sp<SharedGenerator> fSharedGenerator;
        SkImageInfo            fInfo;        // info for the (possibly subset/recolored) image
        SkIPoint               fOrigin;      // subset origin within the generator's full bounds
        sk_sp<SkColorSpace>    fColorSpace;
        uint32_t               fUniqueID;    // new ID if subset/colorSpace differ from generator
    };

    SkImage_Lazy(Validator* validator);

    SkImageInfo onImageInfo() const override {
        return fInfo;
    }
    SkColorType onColorType() const override {
        return kUnknown_SkColorType;
    }
    SkAlphaType onAlphaType() const override {
        return fInfo.alphaType();
    }

    // The subset this image represents, in the generator's coordinate space.
    SkIRect onGetSubset() const override {
        return SkIRect::MakeXYWH(fOrigin.fX, fOrigin.fY, fInfo.width(), fInfo.height());
    }

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> asTextureProxyRef(GrContext*,
                                            const GrSamplerState&, SkColorSpace*,
                                            sk_sp<SkColorSpace>*,
                                            SkScalar scaleAdjust[2]) const override;
#endif
    sk_sp<SkData> onRefEncoded() const override;
    sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;
    bool getROPixels(SkBitmap*, SkColorSpace* dstColorSpace, CachingHint) const override;
    bool onIsLazyGenerated() const override { return true; }
    bool onCanLazyGenerateOnGPU() const override;
    sk_sp<SkImage> onMakeColorSpace(sk_sp<SkColorSpace>, SkColorType) const override;

    bool onIsValid(GrContext*) const override;

    SkImageCacherator* peekCacherator() const override {
        return const_cast<SkImage_Lazy*>(this);
    }

    // Only returns true if the generated bitmap has already been cached.
    bool lockAsBitmapOnlyIfAlreadyCached(SkBitmap*, CachedFormat) const;
    // Call the underlying generator directly (no caching).
    bool directGeneratePixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                              int srcX, int srcY) const;

    // SkImageCacherator interface
#if SK_SUPPORT_GPU
    // Returns the texture proxy. If the cacherator is generating the texture and wants to cache it,
    // it should use the passed in key (if the key is valid).
    sk_sp<GrTextureProxy> lockTextureProxy(GrContext*,
                                           const GrUniqueKey& key,
                                           SkImage::CachingHint,
                                           bool willBeMipped,
                                           SkColorSpace* dstColorSpace,
                                           GrTextureMaker::AllowedTexGenType genType) override;

    // Returns the color space of the texture that would be returned if you called lockTexture.
    // Separate code path to allow querying of the color space for textures that cached (even
    // externally).
    sk_sp<SkColorSpace> getColorSpace(GrContext*, SkColorSpace* dstColorSpace) override;
    void makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, CachedFormat,
                                 GrUniqueKey* cacheKey) override;
#endif

    SkImageInfo buildCacheInfo(CachedFormat) const override;

private:
    class ScopedGenerator;

    /**
     *  On success (true), bitmap will point to the pixels for this generator. If this returns
     *  false, the bitmap will be reset to empty.
     */
    bool lockAsBitmap(SkBitmap*, SkImage::CachingHint, CachedFormat, const SkImageInfo&) const;

    sk_sp<SharedGenerator> fSharedGenerator;
    // Note that fInfo is not necessarily the info from the generator. It may be cropped by
    // onMakeSubset and its color space may be changed by onMakeColorSpace.
    const SkImageInfo      fInfo;
    const SkIPoint         fOrigin;

    // Per-CachedFormat unique ID, lazily assigned exactly once via SkOnce.
    struct IDRec {
        SkOnce      fOnce;
        uint32_t    fUniqueID;
    };
    mutable IDRec fIDRecs[kNumCachedFormats];

    uint32_t getUniqueID(CachedFormat) const;

    // Repeated calls to onMakeColorSpace will result in a proliferation of unique IDs and
    // SkImage_Lazy instances. Cache the result of the last successful onMakeColorSpace call.
    mutable SkMutex             fOnMakeColorSpaceMutex;
    mutable sk_sp<SkColorSpace> fOnMakeColorSpaceTarget;
    mutable sk_sp<SkImage>      fOnMakeColorSpaceResult;

    typedef SkImage_Base INHERITED;
};
163
164///////////////////////////////////////////////////////////////////////////////
165
Christopher Cameron77e96662017-07-08 01:47:47 -0700166SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
167 sk_sp<SkColorSpace> colorSpace)
Brian Osmandf7e0752017-04-26 16:20:28 -0400168 : fSharedGenerator(std::move(gen)) {
Brian Osmandf7e0752017-04-26 16:20:28 -0400169 if (!fSharedGenerator) {
170 return;
171 }
172
173 // The following generator accessors are safe without acquiring the mutex (const getters).
174 // TODO: refactor to use a ScopedGenerator instead, for clarity.
175 const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
176 if (info.isEmpty()) {
177 fSharedGenerator.reset();
178 return;
179 }
180
181 fUniqueID = fSharedGenerator->fGenerator->uniqueID();
182 const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
183 if (subset) {
184 if (!bounds.contains(*subset)) {
185 fSharedGenerator.reset();
186 return;
187 }
188 if (*subset != bounds) {
189 // we need a different uniqueID since we really are a subset of the raw generator
190 fUniqueID = SkNextID::ImageID();
191 }
192 } else {
193 subset = &bounds;
194 }
195
196 fInfo = info.makeWH(subset->width(), subset->height());
197 fOrigin = SkIPoint::Make(subset->x(), subset->y());
Christopher Cameron77e96662017-07-08 01:47:47 -0700198 if (colorSpace) {
199 fInfo = fInfo.makeColorSpace(colorSpace);
200 fUniqueID = SkNextID::ImageID();
201 }
Brian Osmandf7e0752017-04-26 16:20:28 -0400202}
203
204///////////////////////////////////////////////////////////////////////////////
205
206// Helper for exclusive access to a shared generator.
207class SkImage_Lazy::ScopedGenerator {
208public:
209 ScopedGenerator(const sk_sp<SharedGenerator>& gen)
210 : fSharedGenerator(gen)
211 , fAutoAquire(gen->fMutex) {}
212
213 SkImageGenerator* operator->() const {
214 fSharedGenerator->fMutex.assertHeld();
215 return fSharedGenerator->fGenerator.get();
216 }
217
218 operator SkImageGenerator*() const {
219 fSharedGenerator->fMutex.assertHeld();
220 return fSharedGenerator->fGenerator.get();
221 }
222
223private:
224 const sk_sp<SharedGenerator>& fSharedGenerator;
225 SkAutoExclusive fAutoAquire;
226};
227
228///////////////////////////////////////////////////////////////////////////////
229
// Takes ownership of the validated state; validator must have converted to true.
SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    // We explicitly set the legacy format slot, but leave the others uninitialized (via SkOnce)
    // and only resolve them to IDs as needed (by calling getUniqueID()).
    fIDRecs[kLegacy_CachedFormat].fOnce([this, validator] {
        fIDRecs[kLegacy_CachedFormat].fUniqueID = validator->fUniqueID;
    });
}
242
243uint32_t SkImage_Lazy::getUniqueID(CachedFormat format) const {
244 IDRec* rec = &fIDRecs[format];
245 rec->fOnce([rec] {
246 rec->fUniqueID = SkNextID::ImageID();
247 });
248 return rec->fUniqueID;
249}
250
251//////////////////////////////////////////////////////////////////////////////////////////////////
252
Brian Osmandf7e0752017-04-26 16:20:28 -0400253SkImageInfo SkImage_Lazy::buildCacheInfo(CachedFormat format) const {
Brian Osmanbfc33e52018-06-27 14:24:11 -0400254 if (kGray_8_SkColorType == fInfo.colorType()) {
255 return fInfo.makeColorSpace(nullptr);
256 } else {
257 return fInfo;
258 }
Brian Osmandf7e0752017-04-26 16:20:28 -0400259}
260
261//////////////////////////////////////////////////////////////////////////////////////////////////
262
263static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
264 SkASSERT(bitmap.getGenerationID() == expectedID);
265 SkASSERT(bitmap.isImmutable());
266 SkASSERT(bitmap.getPixels());
267 return true;
268}
269
270bool SkImage_Lazy::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
Brian Osmanc87cfb62018-07-11 09:08:46 -0400271 int srcX, int srcY) const {
Brian Osmandf7e0752017-04-26 16:20:28 -0400272 ScopedGenerator generator(fSharedGenerator);
273 const SkImageInfo& genInfo = generator->getInfo();
274 // Currently generators do not natively handle subsets, so check that first.
275 if (srcX || srcY || genInfo.width() != info.width() || genInfo.height() != info.height()) {
276 return false;
277 }
278
Brian Osmanc87cfb62018-07-11 09:08:46 -0400279 return generator->getPixels(info, pixels, rb);
Brian Osmandf7e0752017-04-26 16:20:28 -0400280}
281
282//////////////////////////////////////////////////////////////////////////////////////////////////
283
// Returns true only if a decoded bitmap for this format is already in the
// global SkBitmapCache; never triggers a decode.
bool SkImage_Lazy::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap, CachedFormat format) const {
    uint32_t uniqueID = this->getUniqueID(format);
    return SkBitmapCache::Find(SkBitmapCacheDesc::Make(uniqueID,
                                                       fInfo.width(), fInfo.height()), bitmap) &&
           check_output_bitmap(*bitmap, uniqueID);
}
290
Brian Osmanc87cfb62018-07-11 09:08:46 -0400291static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
Brian Osmandf7e0752017-04-26 16:20:28 -0400292 const int genW = gen->getInfo().width();
293 const int genH = gen->getInfo().height();
294 const SkIRect srcR = SkIRect::MakeWH(genW, genH);
295 const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
296 if (!srcR.contains(dstR)) {
297 return false;
298 }
299
300 // If they are requesting a subset, we have to have a temp allocation for full image, and
301 // then copy the subset into their allocation
302 SkBitmap full;
303 SkPixmap fullPM;
304 const SkPixmap* dstPM = &pmap;
305 if (srcR != dstR) {
306 if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
307 return false;
308 }
309 if (!full.peekPixels(&fullPM)) {
310 return false;
311 }
312 dstPM = &fullPM;
313 }
314
Brian Osmanc87cfb62018-07-11 09:08:46 -0400315 if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
Brian Osmandf7e0752017-04-26 16:20:28 -0400316 return false;
317 }
318
319 if (srcR != dstR) {
320 if (!full.readPixels(pmap, originX, originY)) {
321 return false;
322 }
323 }
324 return true;
325}
326
// Produces a decoded, immutable bitmap for this image. Checks the cache first;
// otherwise decodes via the generator, either into a cache allocation (when
// chint allows caching) or into a private temporary bitmap.
bool SkImage_Lazy::lockAsBitmap(SkBitmap* bitmap, SkImage::CachingHint chint, CachedFormat format,
                                const SkImageInfo& info) const {
    if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap, format)) {
        return true;
    }

    uint32_t uniqueID = this->getUniqueID(format);

    SkBitmap tmpBitmap;
    SkBitmapCache::RecPtr cacheRec;
    SkPixmap pmap;
    if (SkImage::kAllow_CachingHint == chint) {
        // Decode directly into memory owned by the bitmap cache.
        auto desc = SkBitmapCacheDesc::Make(uniqueID, info.width(), info.height());
        cacheRec = SkBitmapCache::Alloc(desc, info, &pmap);
        if (!cacheRec) {
            return false;
        }
    } else {
        // Caching disallowed: decode into a private allocation instead.
        if (!tmpBitmap.tryAllocPixels(info)) {
            return false;
        }
        if (!tmpBitmap.peekPixels(&pmap)) {
            return false;
        }
    }

    ScopedGenerator generator(fSharedGenerator);
    if (!generate_pixels(generator, pmap, fOrigin.x(), fOrigin.y())) {
        return false;
    }

    if (cacheRec) {
        // Publishing the rec makes the pixels visible to future cache lookups
        // and stamps the bitmap with our unique ID.
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        SkASSERT(bitmap->getPixels());  // we're locked
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getGenerationID() == uniqueID);
        this->notifyAddedToCache();
    } else {
        *bitmap = tmpBitmap;
        bitmap->pixelRef()->setImmutableWithID(uniqueID);
    }

    check_output_bitmap(*bitmap, uniqueID);
    return true;
}
372
373//////////////////////////////////////////////////////////////////////////////////////////////////
374
Brian Osmanf1b43822017-04-20 13:43:23 -0400375bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
376 int srcX, int srcY, CachingHint chint) const {
Brian Osman61624f02016-12-09 14:51:59 -0500377 SkColorSpace* dstColorSpace = dstInfo.colorSpace();
reed85d91782015-09-10 14:33:38 -0700378 SkBitmap bm;
reed6868c3f2015-11-24 11:44:47 -0800379 if (kDisallow_CachingHint == chint) {
Brian Osmanc87cfb62018-07-11 09:08:46 -0400380 CachedFormat cacheFormat = kLegacy_CachedFormat;
Brian Osmandf7e0752017-04-26 16:20:28 -0400381 if (this->lockAsBitmapOnlyIfAlreadyCached(&bm, cacheFormat)) {
reed6868c3f2015-11-24 11:44:47 -0800382 return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
383 } else {
384 // Try passing the caller's buffer directly down to the generator. If this fails we
385 // may still succeed in the general case, as the generator may prefer some other
386 // config, which we could then convert via SkBitmap::readPixels.
Brian Osmanc87cfb62018-07-11 09:08:46 -0400387 if (this->directGeneratePixels(dstInfo, dstPixels, dstRB, srcX, srcY)) {
reed6868c3f2015-11-24 11:44:47 -0800388 return true;
389 }
390 // else fall through
391 }
392 }
393
Brian Osman61624f02016-12-09 14:51:59 -0500394 if (this->getROPixels(&bm, dstColorSpace, chint)) {
reed85d91782015-09-10 14:33:38 -0700395 return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
396 }
397 return false;
398}
399
Ben Wagnerbdf54332018-05-15 14:12:14 -0400400sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
401 ScopedGenerator generator(fSharedGenerator);
402 return generator->refEncodedData();
403}
reed85d91782015-09-10 14:33:38 -0700404
Brian Osmanf1b43822017-04-20 13:43:23 -0400405bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkColorSpace* dstColorSpace,
406 CachingHint chint) const {
Brian Osmanc87cfb62018-07-11 09:08:46 -0400407 CachedFormat cacheFormat = kLegacy_CachedFormat;
Christopher Cameron77e96662017-07-08 01:47:47 -0700408 const SkImageInfo cacheInfo = this->buildCacheInfo(cacheFormat);
Brian Osmanc87cfb62018-07-11 09:08:46 -0400409 return this->lockAsBitmap(bitmap, chint, cacheFormat, cacheInfo);
reed85d91782015-09-10 14:33:38 -0700410}
411
Brian Osman5bbd0762017-05-08 11:07:42 -0400412bool SkImage_Lazy::onIsValid(GrContext* context) const {
413 ScopedGenerator generator(fSharedGenerator);
414 return generator->isValid(context);
415}
416
Mike Reed7f1d0202017-05-08 16:13:39 -0400417bool SkImage_Lazy::onCanLazyGenerateOnGPU() const {
418#if SK_SUPPORT_GPU
419 ScopedGenerator generator(fSharedGenerator);
Stan Ilievba81af22017-06-08 15:16:53 -0400420 return SkImageGenerator::TexGenType::kNone != generator->onCanGenerateTexture();
Mike Reed7f1d0202017-05-08 16:13:39 -0400421#else
422 return false;
423#endif
424}
425
Brian Osmandf7e0752017-04-26 16:20:28 -0400426///////////////////////////////////////////////////////////////////////////////////////////////////
427
#if SK_SUPPORT_GPU
// Returns a texture proxy for this image, delegating decode/upload/caching
// decisions to GrImageTextureMaker.
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkColorSpace* dstColorSpace,
                                                      sk_sp<SkColorSpace>* texColorSpace,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }
    GrImageTextureMaker maker(context, this, kAllow_CachingHint);
    return maker.refTextureProxyForParams(params, dstColorSpace, texColorSpace, scaleAdjust);
}
#endif
442
Brian Osmanf1b43822017-04-20 13:43:23 -0400443sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
Brian Osmandf7e0752017-04-26 16:20:28 -0400444 SkASSERT(fInfo.bounds().contains(subset));
445 SkASSERT(fInfo.bounds() != subset);
reed7b6945b2015-09-24 00:50:58 -0700446
Brian Osmandf7e0752017-04-26 16:20:28 -0400447 const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
Christopher Cameron77e96662017-07-08 01:47:47 -0700448 Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
Brian Osmanf1b43822017-04-20 13:43:23 -0400449 return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
reed7b6945b2015-09-24 00:50:58 -0700450}
451
Matt Sarett9f3dcb32017-05-04 08:53:32 -0400452sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target,
Brian Osmanb62f50c2018-07-12 14:44:27 -0400453 SkColorType targetColorType) const {
Christopher Camerond4b67872017-07-13 15:18:08 -0700454 SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
455 if (target && fOnMakeColorSpaceTarget &&
456 SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
457 return fOnMakeColorSpaceResult;
458 }
Christopher Cameron77e96662017-07-08 01:47:47 -0700459 const SkIRect generatorSubset =
460 SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
461 Validator validator(fSharedGenerator, &generatorSubset, target);
Christopher Camerond4b67872017-07-13 15:18:08 -0700462 sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
463 if (result) {
464 fOnMakeColorSpaceTarget = target;
465 fOnMakeColorSpaceResult = result;
466 }
467 return result;
Matt Sarett6de13102017-03-14 14:10:48 -0400468}
469
Mike Reed185130c2017-02-15 15:14:16 -0500470sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
471 const SkIRect* subset) {
Christopher Cameron77e96662017-07-08 01:47:47 -0700472 SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);
fmalita7929e3a2016-10-27 08:15:44 -0700473
Brian Osmanf1b43822017-04-20 13:43:23 -0400474 return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
reed85d91782015-09-10 14:33:38 -0700475}
Brian Osmandf7e0752017-04-26 16:20:28 -0400476
477//////////////////////////////////////////////////////////////////////////////////////////////////
478
479/**
480 * Implementation of SkImageCacherator interface, as needed by GrImageTextureMaker
481 */
482
483#if SK_SUPPORT_GPU
484
485void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, CachedFormat format,
486 GrUniqueKey* cacheKey) {
487 SkASSERT(!cacheKey->isValid());
488 if (origKey.isValid()) {
489 static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
Derek Sollenbergere1c60d62018-04-04 11:53:35 -0400490 GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 1, "Image");
Brian Osmandf7e0752017-04-26 16:20:28 -0400491 builder[0] = format;
492 }
493}
494
// Adapts an SkImageGenerator to the GrYUVProvider interface so the GPU can
// consume YUV planes directly. Thin forwarders; fGen is a non-owning pointer
// whose lifetime (and locking) is managed by the caller.
class Generator_GrYUVProvider : public GrYUVProvider {
    SkImageGenerator* fGen;

public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

    uint32_t onGetID() override { return fGen->uniqueID(); }
    bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUV8(sizeInfo, colorSpace);
    }
    bool onGetYUV8Planes(const SkYUVSizeInfo& sizeInfo, void* planes[3]) override {
        return fGen->getYUV8Planes(sizeInfo, planes);
    }
};
509
// Assigns the unique key to proxy. If originalProxy already holds this key
// (it was the cached-but-unmipped predecessor), the key is first removed from
// it so the mipped replacement can take ownership of the cache slot.
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        SkASSERT(proxy->origin() == kTopLeft_GrSurfaceOrigin);
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}
527
528sk_sp<SkColorSpace> SkImage_Lazy::getColorSpace(GrContext* ctx, SkColorSpace* dstColorSpace) {
Brian Osmana8ac9242017-09-07 10:19:08 -0400529 if (!dstColorSpace) {
530 // In legacy mode, we do no modification to the image's color space or encoding.
531 // Subsequent legacy drawing is likely to ignore the color space, but some clients
532 // may want to know what space the image data is in, so return it.
533 return fInfo.refColorSpace();
534 } else {
Brian Osmanc87cfb62018-07-11 09:08:46 -0400535 CachedFormat format = kLegacy_CachedFormat;
Brian Osmana8ac9242017-09-07 10:19:08 -0400536 SkImageInfo cacheInfo = this->buildCacheInfo(format);
537 return cacheInfo.refColorSpace();
538 }
Brian Osmandf7e0752017-04-26 16:20:28 -0400539}
540
/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(GrContext* ctx,
                                                     const GrUniqueKey& origKey,
                                                     SkImage::CachingHint chint,
                                                     bool willBeMipped,
                                                     SkColorSpace* dstColorSpace,
                                                     GrTextureMaker::AllowedTexGenType genType) {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Determine which cached format we're going to use (which may involve decoding to a different
    // info than the generator provides).
    CachedFormat format = kLegacy_CachedFormat;

    // Fold the cache format into our texture key
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, format, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one.
    //    Note: on a hit that lacks required mips, we keep the proxy and fall
    //    through to the mip-copy fixup at the bottom of this function.
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // The CachedFormat is both an index for which cache "slot" we'll use to store this particular
    // decoded variant of the encoded data, and also a recipe for how to transform the original
    // info to get the one that we're going to decode to.
    const SkImageInfo cacheInfo = this->buildCacheInfo(format);

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        // A kCheap request bails out entirely if the generator's texture path isn't cheap.
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
                SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, cacheInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(cacheInfo);
        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this, apply
        // a color space conversion from the generator's color space to this image's color space.
        // Note that we can only do this conversion (on the GPU) if both color spaces are XYZ type.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        if ((!generatorColorSpace || generatorColorSpace->toXYZD50()) &&
            (!thisColorSpace || thisColorSpace->toXYZD50())) {
            // TODO: Update to create the mipped surface in the YUV generator and draw the base
            //       layer directly into the mipped surface.
            proxy = provider.refAsTextureProxy(ctx, desc, generatorColorSpace, thisColorSpace);
            if (proxy) {
                SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                         kLockTexturePathCount);
                set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
                return proxy;
            }
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->lockAsBitmap(&bitmap, chint, format, cacheInfo)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            // Mip generation failed (or wasn't requested): upload the base level only.
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped, generated
        // a native non mipped proxy, or generated a non-mipped yuv proxy. Thus we generate a new
        // mipped surface and copy the original proxy into the base layer. We will then let the gpu
        // generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non mipped proxy; See skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}
679
680///////////////////////////////////////////////////////////////////////////////////////////////////
681
682#endif