blob: 8d8e95219dbae0aff4cb8cfc885a82dbeefe1b28 [file] [log] [blame]
reed85d91782015-09-10 14:33:38 -07001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "SkImage_Base.h"
reed85d91782015-09-10 14:33:38 -07009#include "SkImageCacherator.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040010
11#include "SkBitmap.h"
12#include "SkBitmapCache.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040013#include "SkData.h"
14#include "SkImageGenerator.h"
reed85d91782015-09-10 14:33:38 -070015#include "SkImagePriv.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040016#include "SkNextID.h"
reed85d91782015-09-10 14:33:38 -070017#include "SkPixelRef.h"
reed85d91782015-09-10 14:33:38 -070018
Brian Osmandf7e0752017-04-26 16:20:28 -040019#if SK_SUPPORT_GPU
20#include "GrContext.h"
21#include "GrContextPriv.h"
22#include "GrGpuResourcePriv.h"
23#include "GrImageTextureMaker.h"
24#include "GrResourceKey.h"
Robert Phillips1afd4cd2018-01-08 13:40:32 -050025#include "GrProxyProvider.h"
Brian Salomon2bbdcc42017-09-07 12:36:34 -040026#include "GrSamplerState.h"
Brian Osmandf7e0752017-04-26 16:20:28 -040027#include "GrYUVProvider.h"
28#include "SkGr.h"
29#endif
reed85d91782015-09-10 14:33:38 -070030
// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    // Wraps the generator for shared use; returns nullptr if no generator was supplied.
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    // These two need direct access to fGenerator/fMutex (see ScopedGenerator below).
    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;  // acquired by ScopedGenerator for exclusive access
};
53
// An SkImage backed by a (possibly shared) SkImageGenerator; pixels/textures are produced
// lazily on demand and cached (CPU bitmap cache and/or GPU proxy cache).
class SkImage_Lazy : public SkImage_Base, public SkImageCacherator {
public:
    // Validates the (generator, subset, colorSpace) triple up front. If the triple is
    // unusable, the bool conversion is false and no SkImage_Lazy should be constructed.
    struct Validator {
        Validator(sk_sp<SharedGenerator>, const SkIRect* subset, sk_sp<SkColorSpace> colorSpace);

        operator bool() const { return fSharedGenerator.get(); }

        sk_sp<SharedGenerator> fSharedGenerator;
        SkImageInfo            fInfo;
        SkIPoint               fOrigin;
        sk_sp<SkColorSpace>    fColorSpace;
        uint32_t               fUniqueID;
    };

    SkImage_Lazy(Validator* validator);

    SkImageInfo onImageInfo() const override {
        return fInfo;
    }
    SkColorType onColorType() const override {
        return kUnknown_SkColorType;
    }
    SkAlphaType onAlphaType() const override {
        return fInfo.alphaType();
    }

    SkIRect onGetSubset() const override {
        return SkIRect::MakeXYWH(fOrigin.fX, fOrigin.fY, fInfo.width(), fInfo.height());
    }

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> asTextureProxyRef(GrContext*,
                                            const GrSamplerState&, SkColorSpace*,
                                            sk_sp<SkColorSpace>*,
                                            SkScalar scaleAdjust[2]) const override;
#endif
    sk_sp<SkData> onRefEncoded() const override;
    sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;
    bool getROPixels(SkBitmap*, SkColorSpace* dstColorSpace, CachingHint) const override;
    bool onIsLazyGenerated() const override { return true; }
    bool onCanLazyGenerateOnGPU() const override;
    sk_sp<SkImage> onMakeColorSpace(sk_sp<SkColorSpace>, SkColorType,
                                    SkTransferFunctionBehavior) const override;

    bool onIsValid(GrContext*) const override;

    SkImageCacherator* peekCacherator() const override {
        return const_cast<SkImage_Lazy*>(this);
    }

    // Only return true if the generator output has already been cached.
    bool lockAsBitmapOnlyIfAlreadyCached(SkBitmap*, CachedFormat) const;
    // Call the underlying generator directly (no caching).
    bool directGeneratePixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                              int srcX, int srcY) const;

    // SkImageCacherator interface
#if SK_SUPPORT_GPU
    // Returns the texture proxy. If the cacherator is generating the texture and wants to cache it,
    // it should use the passed in key (if the key is valid).
    sk_sp<GrTextureProxy> lockTextureProxy(GrContext*,
                                           const GrUniqueKey& key,
                                           SkImage::CachingHint,
                                           bool willBeMipped,
                                           SkColorSpace* dstColorSpace,
                                           GrTextureMaker::AllowedTexGenType genType) override;

    // Returns the color space of the texture that would be returned if you called lockTexture.
    // Separate code path to allow querying of the color space for textures that cached (even
    // externally).
    sk_sp<SkColorSpace> getColorSpace(GrContext*, SkColorSpace* dstColorSpace) override;
    void makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, CachedFormat,
                                 GrUniqueKey* cacheKey) override;
#endif

    SkImageInfo buildCacheInfo(CachedFormat) const override;

private:
    class ScopedGenerator;

    /**
     *  On success (true), bitmap will point to the pixels for this generator. If this returns
     *  false, the bitmap will be reset to empty.
     */
    bool lockAsBitmap(SkBitmap*, SkImage::CachingHint, CachedFormat, const SkImageInfo&) const;

    sk_sp<SharedGenerator> fSharedGenerator;
    // Note that fInfo is not necessarily the info from the generator. It may be cropped by
    // onMakeSubset and its color space may be changed by onMakeColorSpace.
    const SkImageInfo      fInfo;
    const SkIPoint         fOrigin;  // offset of this image within the generator's full bounds

    // One lazily-minted unique ID per cached format slot (see getUniqueID()).
    struct IDRec {
        SkOnce   fOnce;      // guards one-time initialization of fUniqueID
        uint32_t fUniqueID;
    };
    mutable IDRec fIDRecs[kNumCachedFormats];

    uint32_t getUniqueID(CachedFormat) const;

    // Repeated calls to onMakeColorSpace will result in a proliferation of unique IDs and
    // SkImage_Lazy instances. Cache the result of the last successful onMakeColorSpace call.
    mutable SkMutex             fOnMakeColorSpaceMutex;
    mutable sk_sp<SkColorSpace> fOnMakeColorSpaceTarget;
    mutable sk_sp<SkImage>      fOnMakeColorSpaceResult;

    typedef SkImage_Base INHERITED;
};
164
165///////////////////////////////////////////////////////////////////////////////
166
// Validates the generator/subset/colorSpace combination. On any failure fSharedGenerator is
// reset to null (which makes `operator bool()` false). Also computes the resulting image info,
// origin, and unique ID; a subset or color-space override forces a fresh unique ID.
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            // Requested subset lies (at least partly) outside the generator: invalid.
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        // No subset requested: treat the full bounds as the subset from here on.
        // (Pointer re-aimed at the local; `bounds` outlives all remaining uses.)
        subset = &bounds;
    }

    fInfo   = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}
204
205///////////////////////////////////////////////////////////////////////////////
206
207// Helper for exclusive access to a shared generator.
208class SkImage_Lazy::ScopedGenerator {
209public:
210 ScopedGenerator(const sk_sp<SharedGenerator>& gen)
211 : fSharedGenerator(gen)
212 , fAutoAquire(gen->fMutex) {}
213
214 SkImageGenerator* operator->() const {
215 fSharedGenerator->fMutex.assertHeld();
216 return fSharedGenerator->fGenerator.get();
217 }
218
219 operator SkImageGenerator*() const {
220 fSharedGenerator->fMutex.assertHeld();
221 return fSharedGenerator->fGenerator.get();
222 }
223
224private:
225 const sk_sp<SharedGenerator>& fSharedGenerator;
226 SkAutoExclusive fAutoAquire;
227};
228
229///////////////////////////////////////////////////////////////////////////////
230
231SkImage_Lazy::SkImage_Lazy(Validator* validator)
232 : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
233 , fSharedGenerator(std::move(validator->fSharedGenerator))
234 , fInfo(validator->fInfo)
235 , fOrigin(validator->fOrigin) {
236 SkASSERT(fSharedGenerator);
Brian Osmandf7e0752017-04-26 16:20:28 -0400237 // We explicit set the legacy format slot, but leave the others uninitialized (via SkOnce)
238 // and only resolove them to IDs as needed (by calling getUniqueID()).
239 fIDRecs[kLegacy_CachedFormat].fOnce([this, validator] {
240 fIDRecs[kLegacy_CachedFormat].fUniqueID = validator->fUniqueID;
241 });
242}
243
244uint32_t SkImage_Lazy::getUniqueID(CachedFormat format) const {
245 IDRec* rec = &fIDRecs[format];
246 rec->fOnce([rec] {
247 rec->fUniqueID = SkNextID::ImageID();
248 });
249 return rec->fUniqueID;
250}
251
252//////////////////////////////////////////////////////////////////////////////////////////////////
253
Brian Osmandf7e0752017-04-26 16:20:28 -0400254SkImageInfo SkImage_Lazy::buildCacheInfo(CachedFormat format) const {
Brian Osmanbfc33e52018-06-27 14:24:11 -0400255 if (kGray_8_SkColorType == fInfo.colorType()) {
256 return fInfo.makeColorSpace(nullptr);
257 } else {
258 return fInfo;
259 }
Brian Osmandf7e0752017-04-26 16:20:28 -0400260}
261
262//////////////////////////////////////////////////////////////////////////////////////////////////
263
264static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
265 SkASSERT(bitmap.getGenerationID() == expectedID);
266 SkASSERT(bitmap.isImmutable());
267 SkASSERT(bitmap.getPixels());
268 return true;
269}
270
271bool SkImage_Lazy::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
Brian Osmanc87cfb62018-07-11 09:08:46 -0400272 int srcX, int srcY) const {
Brian Osmandf7e0752017-04-26 16:20:28 -0400273 ScopedGenerator generator(fSharedGenerator);
274 const SkImageInfo& genInfo = generator->getInfo();
275 // Currently generators do not natively handle subsets, so check that first.
276 if (srcX || srcY || genInfo.width() != info.width() || genInfo.height() != info.height()) {
277 return false;
278 }
279
Brian Osmanc87cfb62018-07-11 09:08:46 -0400280 return generator->getPixels(info, pixels, rb);
Brian Osmandf7e0752017-04-26 16:20:28 -0400281}
282
283//////////////////////////////////////////////////////////////////////////////////////////////////
284
285bool SkImage_Lazy::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap, CachedFormat format) const {
286 uint32_t uniqueID = this->getUniqueID(format);
287 return SkBitmapCache::Find(SkBitmapCacheDesc::Make(uniqueID,
288 fInfo.width(), fInfo.height()), bitmap) &&
289 check_output_bitmap(*bitmap, uniqueID);
290}
291
// Decode from |gen| into |pmap|, where |pmap| represents the window of the generator's image
// starting at (originX, originY). Returns false if that window falls outside the generator's
// bounds or if decoding/copying fails.
static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to have a temp allocation for full image, and
    // then copy the subset into their allocation
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;  // decode target: caller's pixmap, or the temp full image
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    // Generators always decode the full image.
    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    // Subset case: copy the requested window out of the temp full-image decode.
    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}
327
// Produce (or fetch) a decoded bitmap for this image in the given cached format. With
// kAllow_CachingHint the pixels are allocated in (and added to) the global bitmap cache;
// otherwise a private allocation is used. Either way the result is immutable and pinned
// to this format's unique ID.
bool SkImage_Lazy::lockAsBitmap(SkBitmap* bitmap, SkImage::CachingHint chint, CachedFormat format,
                                const SkImageInfo& info) const {
    // Fast path: already decoded and cached.
    if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap, format)) {
        return true;
    }

    uint32_t uniqueID = this->getUniqueID(format);

    SkBitmap tmpBitmap;
    SkBitmapCache::RecPtr cacheRec;
    SkPixmap pmap;
    if (SkImage::kAllow_CachingHint == chint) {
        // Allocate the pixels directly inside a bitmap-cache record.
        auto desc = SkBitmapCacheDesc::Make(uniqueID, info.width(), info.height());
        cacheRec = SkBitmapCache::Alloc(desc, info, &pmap);
        if (!cacheRec) {
            return false;
        }
    } else {
        // Caching disallowed: decode into a private, uncached allocation.
        if (!tmpBitmap.tryAllocPixels(info)) {
            return false;
        }
        if (!tmpBitmap.peekPixels(&pmap)) {
            return false;
        }
    }

    ScopedGenerator generator(fSharedGenerator);
    if (!generate_pixels(generator, pmap, fOrigin.x(), fOrigin.y())) {
        return false;
    }

    if (cacheRec) {
        // Publish the record to the cache; this also fixes the bitmap's generation ID.
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        SkASSERT(bitmap->getPixels());  // we're locked
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getGenerationID() == uniqueID);
        this->notifyAddedToCache();
    } else {
        *bitmap = tmpBitmap;
        bitmap->pixelRef()->setImmutableWithID(uniqueID);
    }

    check_output_bitmap(*bitmap, uniqueID);
    return true;
}
373
374//////////////////////////////////////////////////////////////////////////////////////////////////
375
// Read pixels into the caller's buffer. With kDisallow_CachingHint we first try an
// already-cached bitmap, then a direct (cache-free) decode into the destination; only if
// both miss do we fall through to the general cached path.
bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkColorSpace* dstColorSpace = dstInfo.colorSpace();
    SkBitmap bm;
    if (kDisallow_CachingHint == chint) {
        CachedFormat cacheFormat = kLegacy_CachedFormat;
        if (this->lockAsBitmapOnlyIfAlreadyCached(&bm, cacheFormat)) {
            return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
        } else {
            // Try passing the caller's buffer directly down to the generator. If this fails we
            // may still succeed in the general case, as the generator may prefer some other
            // config, which we could then convert via SkBitmap::readPixels.
            if (this->directGeneratePixels(dstInfo, dstPixels, dstRB, srcX, srcY)) {
                return true;
            }
            // else fall through
        }
    }

    // General path: decode (and possibly cache) a bitmap, then convert out of it.
    if (this->getROPixels(&bm, dstColorSpace, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}
400
Ben Wagnerbdf54332018-05-15 14:12:14 -0400401sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
402 ScopedGenerator generator(fSharedGenerator);
403 return generator->refEncodedData();
404}
reed85d91782015-09-10 14:33:38 -0700405
Brian Osmanf1b43822017-04-20 13:43:23 -0400406bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkColorSpace* dstColorSpace,
407 CachingHint chint) const {
Brian Osmanc87cfb62018-07-11 09:08:46 -0400408 CachedFormat cacheFormat = kLegacy_CachedFormat;
Christopher Cameron77e96662017-07-08 01:47:47 -0700409 const SkImageInfo cacheInfo = this->buildCacheInfo(cacheFormat);
Brian Osmanc87cfb62018-07-11 09:08:46 -0400410 return this->lockAsBitmap(bitmap, chint, cacheFormat, cacheInfo);
reed85d91782015-09-10 14:33:38 -0700411}
412
Brian Osman5bbd0762017-05-08 11:07:42 -0400413bool SkImage_Lazy::onIsValid(GrContext* context) const {
414 ScopedGenerator generator(fSharedGenerator);
415 return generator->isValid(context);
416}
417
Mike Reed7f1d0202017-05-08 16:13:39 -0400418bool SkImage_Lazy::onCanLazyGenerateOnGPU() const {
419#if SK_SUPPORT_GPU
420 ScopedGenerator generator(fSharedGenerator);
Stan Ilievba81af22017-06-08 15:16:53 -0400421 return SkImageGenerator::TexGenType::kNone != generator->onCanGenerateTexture();
Mike Reed7f1d0202017-05-08 16:13:39 -0400422#else
423 return false;
424#endif
425}
426
Brian Osmandf7e0752017-04-26 16:20:28 -0400427///////////////////////////////////////////////////////////////////////////////////////////////////
428
#if SK_SUPPORT_GPU
// Return a texture proxy for this image, suitable for the given sampler params.
// All of the cache/generate work is delegated to GrImageTextureMaker (which calls back
// into lockTextureProxy below).
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkColorSpace* dstColorSpace,
                                                      sk_sp<SkColorSpace>* texColorSpace,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, dstColorSpace, texColorSpace, scaleAdjust);
}
#endif
443
Brian Osmanf1b43822017-04-20 13:43:23 -0400444sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
Brian Osmandf7e0752017-04-26 16:20:28 -0400445 SkASSERT(fInfo.bounds().contains(subset));
446 SkASSERT(fInfo.bounds() != subset);
reed7b6945b2015-09-24 00:50:58 -0700447
Brian Osmandf7e0752017-04-26 16:20:28 -0400448 const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
Christopher Cameron77e96662017-07-08 01:47:47 -0700449 Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
Brian Osmanf1b43822017-04-20 13:43:23 -0400450 return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
reed7b6945b2015-09-24 00:50:58 -0700451}
452
// Re-target this image to a new color space, memoizing the last successful result so that
// repeated calls with the same target do not proliferate unique IDs / SkImage_Lazy instances.
// NOTE(review): targetColorType and premulBehavior are unused in this implementation — the
// conversion is expressed purely through the Validator's colorSpace; confirm against callers.
sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target,
                                              SkColorType targetColorType,
                                              SkTransferFunctionBehavior premulBehavior) const {
    SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
    // Memoization hit: same target as the previous successful call.
    if (target && fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    // Re-validate over this image's window of the generator, with the new color space.
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}
471
Mike Reed185130c2017-02-15 15:14:16 -0500472sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
473 const SkIRect* subset) {
Christopher Cameron77e96662017-07-08 01:47:47 -0700474 SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);
fmalita7929e3a2016-10-27 08:15:44 -0700475
Brian Osmanf1b43822017-04-20 13:43:23 -0400476 return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
reed85d91782015-09-10 14:33:38 -0700477}
Brian Osmandf7e0752017-04-26 16:20:28 -0400478
479//////////////////////////////////////////////////////////////////////////////////////////////////
480
481/**
482 * Implementation of SkImageCacherator interface, as needed by GrImageTextureMaker
483 */
484
485#if SK_SUPPORT_GPU
486
487void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, CachedFormat format,
488 GrUniqueKey* cacheKey) {
489 SkASSERT(!cacheKey->isValid());
490 if (origKey.isValid()) {
491 static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
Derek Sollenbergere1c60d62018-04-04 11:53:35 -0400492 GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 1, "Image");
Brian Osmandf7e0752017-04-26 16:20:28 -0400493 builder[0] = format;
494 }
495}
496
// Adapts an SkImageGenerator to the GrYUVProvider interface, so the GPU can consume the
// generator's YUV planes directly. The generator pointer is borrowed, not owned.
class Generator_GrYUVProvider : public GrYUVProvider {
    SkImageGenerator* fGen;

public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

    uint32_t onGetID() override { return fGen->uniqueID(); }
    bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUV8(sizeInfo, colorSpace);
    }
    bool onGetYUV8Planes(const SkYUVSizeInfo& sizeInfo, void* planes[3]) override {
        return fGen->getYUV8Planes(sizeInfo, planes);
    }
};
511
// Attach |key| to |proxy| in the proxy provider. If |originalProxy| already owns the key
// (it was the cached, non-mipped version that we are replacing with a mipped |proxy|),
// the key is first stolen from it. No-op when |key| is invalid.
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        SkASSERT(proxy->origin() == kTopLeft_GrSurfaceOrigin);
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}
529
530sk_sp<SkColorSpace> SkImage_Lazy::getColorSpace(GrContext* ctx, SkColorSpace* dstColorSpace) {
Brian Osmana8ac9242017-09-07 10:19:08 -0400531 if (!dstColorSpace) {
532 // In legacy mode, we do no modification to the image's color space or encoding.
533 // Subsequent legacy drawing is likely to ignore the color space, but some clients
534 // may want to know what space the image data is in, so return it.
535 return fInfo.refColorSpace();
536 } else {
Brian Osmanc87cfb62018-07-11 09:08:46 -0400537 CachedFormat format = kLegacy_CachedFormat;
Brian Osmana8ac9242017-09-07 10:19:08 -0400538 SkImageInfo cacheInfo = this->buildCacheInfo(format);
539 return cacheInfo.refColorSpace();
540 }
Brian Osmandf7e0752017-04-26 16:20:28 -0400541}
542
/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 *
 *  If a non-mipped proxy is found/created but mips are required, a mipped copy is made at
 *  the end (falling back to the non-mipped proxy if that copy fails).
 */
// NOTE(review): dstColorSpace is unused in this implementation — confirm against overrides/callers.
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(GrContext* ctx,
                                                     const GrUniqueKey& origKey,
                                                     SkImage::CachingHint chint,
                                                     bool willBeMipped,
                                                     SkColorSpace* dstColorSpace,
                                                     GrTextureMaker::AllowedTexGenType genType) {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Determine which cached format we're going to use (which may involve decoding to a different
    // info than the generator provides).
    CachedFormat format = kLegacy_CachedFormat;

    // Fold the cache format into our texture key
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, format, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            // A cached-but-unmipped proxy when mips are required falls through to the
            // mipped-copy step at the bottom.
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // The CachedFormat is both an index for which cache "slot" we'll use to store this particular
    // decoded variant of the encoded data, and also a recipe for how to transform the original
    // info to get the one that we're going to decode to.
    const SkImageInfo cacheInfo = this->buildCacheInfo(format);

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
                SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, cacheInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(cacheInfo);
        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this, apply
        // a color space conversion from the generator's color space to this image's color space.
        // Note that we can only do this conversion (on the GPU) if both color spaces are XYZ type.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        if ((!generatorColorSpace || generatorColorSpace->toXYZD50()) &&
            (!thisColorSpace || thisColorSpace->toXYZD50())) {
            // TODO: Update to create the mipped surface in the YUV generator and draw the base
            // layer directly into the mipped surface.
            proxy = provider.refAsTextureProxy(ctx, desc, generatorColorSpace, thisColorSpace);
            if (proxy) {
                SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                         kLockTexturePathCount);
                set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
                return proxy;
            }
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->lockAsBitmap(&bitmap, chint, format, cacheInfo)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            // Either mips weren't requested, or the mipped upload failed: plain upload.
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped, generated
        // a native non mipped proxy, or generated a non-mipped yuv proxy. Thus we generate a new
        // mipped surface and copy the original proxy into the base layer. We will then let the gpu
        // generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non mipped proxy; See skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}
681
682///////////////////////////////////////////////////////////////////////////////////////////////////
683
684#endif