/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Base.h"
#include "SkImageCacherator.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"
#include "SkPixelRef.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrResourceKey.h"
#include "GrProxyProvider.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};

class SkImage_Lazy : public SkImage_Base, public SkImageCacherator {
public:
    struct Validator {
        Validator(sk_sp<SharedGenerator>, const SkIRect* subset, sk_sp<SkColorSpace> colorSpace);

        operator bool() const { return fSharedGenerator.get(); }

        sk_sp<SharedGenerator> fSharedGenerator;
        SkImageInfo            fInfo;
        SkIPoint               fOrigin;
        sk_sp<SkColorSpace>    fColorSpace;
        uint32_t               fUniqueID;
    };

    SkImage_Lazy(Validator* validator);

    SkImageInfo onImageInfo() const override {
        return fInfo;
    }
    SkColorType onColorType() const override {
        return kUnknown_SkColorType;
    }
    SkAlphaType onAlphaType() const override {
        return fInfo.alphaType();
    }

    SkIRect onGetSubset() const override {
        return SkIRect::MakeXYWH(fOrigin.fX, fOrigin.fY, fInfo.width(), fInfo.height());
    }

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> asTextureProxyRef(GrContext*,
                                            const GrSamplerState&, SkColorSpace*,
                                            sk_sp<SkColorSpace>*,
                                            SkScalar scaleAdjust[2]) const override;
#endif
    sk_sp<SkData> onRefEncoded() const override;
    sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;
    bool getROPixels(SkBitmap*, SkColorSpace* dstColorSpace, CachingHint) const override;
    bool onIsLazyGenerated() const override { return true; }
    bool onCanLazyGenerateOnGPU() const override;
    sk_sp<SkImage> onMakeColorSpace(sk_sp<SkColorSpace>, SkColorType) const override;

    bool onIsValid(GrContext*) const override;

    SkImageCacherator* peekCacherator() const override {
        return const_cast<SkImage_Lazy*>(this);
    }

    // Only return true if the generated bitmap has already been cached.
    bool lockAsBitmapOnlyIfAlreadyCached(SkBitmap*) const;
    // Call the underlying generator directly
    bool directGeneratePixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                              int srcX, int srcY) const;

    // SkImageCacherator interface
#if SK_SUPPORT_GPU
    // Returns the texture proxy. If the cacherator is generating the texture and wants to cache it,
    // it should use the passed in key (if the key is valid).
    sk_sp<GrTextureProxy> lockTextureProxy(GrContext*,
                                           const GrUniqueKey& key,
                                           SkImage::CachingHint,
                                           bool willBeMipped,
                                           SkColorSpace* dstColorSpace,
                                           GrTextureMaker::AllowedTexGenType genType) override;

    // Returns the color space of the texture that would be returned if you called lockTextureProxy.
    // Separate code path to allow querying of the color space for textures that are cached (even
    // externally).
    sk_sp<SkColorSpace> getColorSpace(GrContext*, SkColorSpace* dstColorSpace) override;

    // TODO: Need to pass in dstColorSpace to fold into key here?
    void makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, GrUniqueKey* cacheKey) override;
#endif

private:
    class ScopedGenerator;

    /**
     *  On success (true), bitmap will point to the pixels for this generator. If this returns
     *  false, the bitmap will be reset to empty.
     *  TODO: Pass in dstColorSpace to ensure bitmap is compatible?
     */
    bool lockAsBitmap(SkBitmap*, SkImage::CachingHint, const SkImageInfo&) const;

    sk_sp<SharedGenerator> fSharedGenerator;
    // Note that fInfo is not necessarily the info from the generator. It may be cropped by
    // onMakeSubset and its color space may be changed by onMakeColorSpace.
    const SkImageInfo      fInfo;
    const SkIPoint         fOrigin;

    uint32_t fUniqueID;

    // Repeated calls to onMakeColorSpace will result in a proliferation of unique IDs and
    // SkImage_Lazy instances. Cache the result of the last successful onMakeColorSpace call.
    mutable SkMutex             fOnMakeColorSpaceMutex;
    mutable sk_sp<SkColorSpace> fOnMakeColorSpaceTarget;
    mutable sk_sp<SkImage>      fOnMakeColorSpaceResult;

    typedef SkImage_Base INHERITED;
};

///////////////////////////////////////////////////////////////////////////////

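// The Validator normalizes the constructor inputs for SkImage_Lazy: it rejects a missing
// generator, empty image info, and out-of-bounds subsets, and computes the image's final info,
// origin, and unique ID (minting a new ID when the subset or color space differs from the raw
// generator's output).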
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo   = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
      : fSharedGenerator(gen)
      , fAutoAquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive               fAutoAquire;
};

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

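// Debug helper: a bitmap returned from (or just installed in) the raster cache must carry this
// image's unique ID, be immutable, and have pixels.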
static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
    SkASSERT(bitmap.getGenerationID() == expectedID);
    SkASSERT(bitmap.isImmutable());
    SkASSERT(bitmap.getPixels());
    return true;
}

bool SkImage_Lazy::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
                                        int srcX, int srcY) const {
    ScopedGenerator generator(fSharedGenerator);
    const SkImageInfo& genInfo = generator->getInfo();
    // Currently generators do not natively handle subsets, so check that first.
    if (srcX || srcY || genInfo.width() != info.width() || genInfo.height() != info.height()) {
        return false;
    }

    return generator->getPixels(info, pixels, rb);
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap) const {
    return SkBitmapCache::Find(SkBitmapCacheDesc::Make(fUniqueID,
                                                       fInfo.width(), fInfo.height()), bitmap) &&
           check_output_bitmap(*bitmap, fUniqueID);
}

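// Ask the generator to produce the pixels described by pmap, which sits at (originX, originY)
// within the full image. Generators only decode the full image, so a subset request decodes into
// a temporary full-size bitmap and then copies the requested window into the caller's pixmap.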
static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If the caller is requesting a subset, we need a temporary allocation for the full image,
    // and then copy the subset into their allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

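// Produce a raster copy of this image: check the bitmap cache first, otherwise decode via the
// generator, either into a cache-owned allocation (kAllow_CachingHint) or into a temporary
// bitmap that is handed back to the caller without being cached.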
bool SkImage_Lazy::lockAsBitmap(SkBitmap* bitmap, SkImage::CachingHint chint,
                                const SkImageInfo& info) const {
    // TODO: Verify dstColorSpace here
    if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap)) {
        return true;
    }

    SkBitmap tmpBitmap;
    SkBitmapCache::RecPtr cacheRec;
    SkPixmap pmap;
    if (SkImage::kAllow_CachingHint == chint) {
        auto desc = SkBitmapCacheDesc::Make(fUniqueID, info.width(), info.height());
        cacheRec = SkBitmapCache::Alloc(desc, info, &pmap);
        if (!cacheRec) {
            return false;
        }
    } else {
        if (!tmpBitmap.tryAllocPixels(info)) {
            return false;
        }
        if (!tmpBitmap.peekPixels(&pmap)) {
            return false;
        }
    }

    ScopedGenerator generator(fSharedGenerator);
    if (!generate_pixels(generator, pmap, fOrigin.x(), fOrigin.y())) {
        return false;
    }

    if (cacheRec) {
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        SkASSERT(bitmap->getPixels());  // we're locked
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getGenerationID() == fUniqueID);
        this->notifyAddedToRasterCache();
    } else {
        *bitmap = tmpBitmap;
        bitmap->pixelRef()->setImmutableWithID(fUniqueID);
    }

    check_output_bitmap(*bitmap, fUniqueID);
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

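// With kDisallow_CachingHint we avoid populating the raster cache: use it only if it is already
// populated, otherwise try handing the caller's buffer straight to the generator before falling
// back to a full decode.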
bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkColorSpace* dstColorSpace = dstInfo.colorSpace();
    SkBitmap bm;
    if (kDisallow_CachingHint == chint) {
        if (this->lockAsBitmapOnlyIfAlreadyCached(&bm)) {
            return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
        } else {
            // Try passing the caller's buffer directly down to the generator. If this fails we
            // may still succeed in the general case, as the generator may prefer some other
            // config, which we could then convert via SkBitmap::readPixels.
            if (this->directGeneratePixels(dstInfo, dstPixels, dstRB, srcX, srcY)) {
                return true;
            }
            // else fall through
        }
    }

    if (this->getROPixels(&bm, dstColorSpace, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkColorSpace* dstColorSpace,
                               CachingHint chint) const {
    return this->lockAsBitmap(bitmap, chint, fInfo);
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

bool SkImage_Lazy::onCanLazyGenerateOnGPU() const {
#if SK_SUPPORT_GPU
    ScopedGenerator generator(fSharedGenerator);
    return SkImageGenerator::TexGenType::kNone != generator->onCanGenerateTexture();
#else
    return false;
#endif
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkColorSpace* dstColorSpace,
                                                      sk_sp<SkColorSpace>* texColorSpace,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, dstColorSpace, texColorSpace, scaleAdjust);
}
#endif

sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

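// Memoize the most recent color-space conversion: converting to the same target repeatedly would
// otherwise mint a fresh unique ID (and a fresh SkImage_Lazy) on every call.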
sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target,
                                              SkColorType targetColorType) const {
    SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
    if (target && fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

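// Illustrative usage of the factory below (a sketch, not code from this file): 'encodedData' is
// assumed to be an sk_sp<SkData> holding an encoded image. No pixels are decoded until the image
// is drawn or read.
//
//     std::unique_ptr<SkImageGenerator> gen = SkImageGenerator::MakeFromEncoded(encodedData);
//     sk_sp<SkImage> image = SkImage::MakeFromGenerator(std::move(gen));
//     canvas->drawImage(image, 0, 0);   // first use triggers decode (and possibly caching)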
sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Implementation of SkImageCacherator interface, as needed by GrImageTextureMaker
 */

#if SK_SUPPORT_GPU

void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, GrUniqueKey* cacheKey) {
    // TODO: Take dstColorSpace, include hash in key
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

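// Adapter exposing an SkImageGenerator through the GrYUVProvider interface, so the GPU path can
// build a texture directly from the generator's YUV planes.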
class Generator_GrYUVProvider : public GrYUVProvider {
    SkImageGenerator* fGen;

public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

    uint32_t onGetID() override { return fGen->uniqueID(); }
    bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUV8(sizeInfo, colorSpace);
    }
    bool onGetYUV8Planes(const SkYUVSizeInfo& sizeInfo, void* planes[3]) override {
        return fGen->getYUV8Planes(sizeInfo, planes);
    }
};

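// Attach the unique key to a newly created proxy. If an earlier, non-mipped proxy already owns
// the key (and we now require mips), migrate the key off that proxy first.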
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        SkASSERT(proxy->origin() == kTopLeft_GrSurfaceOrigin);
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

sk_sp<SkColorSpace> SkImage_Lazy::getColorSpace(GrContext* ctx, SkColorSpace* dstColorSpace) {
    // TODO: Is this ever needed? Is the output of this function going to be:
    // return dstColorSpace ? fInfo.refColorSpace() : dstColorSpace;

    // In legacy mode, we do no modification to the image's color space or encoding.
    // Subsequent legacy drawing is likely to ignore the color space, but some clients
    // may want to know what space the image data is in, so return it.
    return fInfo.refColorSpace();
}

/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(GrContext* ctx,
                                                     const GrUniqueKey& origKey,
                                                     SkImage::CachingHint chint,
                                                     bool willBeMipped,
                                                     SkColorSpace* dstColorSpace,
                                                     GrTextureMaker::AllowedTexGenType genType) {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // TODO: This needs to include the dstColorSpace.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // What format are we going to ask the generator to create?
    // TODO: Based on the dstColorSpace?
    const SkImageInfo cacheInfo = fInfo;

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, cacheInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(cacheInfo);
        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this, apply
        // a color space conversion from the generator's color space to this image's color space.
        // Note that we can only do this conversion (on the GPU) if both color spaces are XYZ type.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        if ((!generatorColorSpace || generatorColorSpace->toXYZD50()) &&
            (!thisColorSpace || thisColorSpace->toXYZD50())) {
            // TODO: Update to create the mipped surface in the YUV generator and draw the base
            //       layer directly into the mipped surface.
            proxy = provider.refAsTextureProxy(ctx, desc, generatorColorSpace, thisColorSpace);
            if (proxy) {
                SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                         kLockTexturePathCount);
                set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
                return proxy;
            }
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->lockAsBitmap(&bitmap, chint, cacheInfo)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped, generated
        // a native non mipped proxy, or generated a non-mipped yuv proxy. Thus we generate a new
        // mipped surface and copy the original proxy into the base layer. We will then let the gpu
        // generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non mipped proxy; See skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif