/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Base.h"
#include "SkImageCacherator.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"
#include "SkPixelRef.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrResourceKey.h"
#include "GrProxyProvider.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};
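
// Illustrative sketch of why this is shared: lazy images derived from one another (via
// onMakeSubset / onMakeColorSpace below) hold the same SharedGenerator, so all of their decodes
// are serialized through the single fMutex, e.g.
//
//     sk_sp<SharedGenerator> shared = SharedGenerator::Make(std::move(gen));
//     // Every SkImage_Lazy that copies `shared` locks shared->fMutex (via ScopedGenerator)
//     // before calling into the generator.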

class SkImage_Lazy : public SkImage_Base, public SkImageCacherator {
public:
    struct Validator {
        Validator(sk_sp<SharedGenerator>, const SkIRect* subset, sk_sp<SkColorSpace> colorSpace);

        operator bool() const { return fSharedGenerator.get(); }

        sk_sp<SharedGenerator> fSharedGenerator;
        SkImageInfo            fInfo;
        SkIPoint               fOrigin;
        sk_sp<SkColorSpace>    fColorSpace;
        uint32_t               fUniqueID;
    };

    SkImage_Lazy(Validator* validator);

    SkImageInfo onImageInfo() const override {
        return fInfo;
    }
    SkColorType onColorType() const override {
        return kUnknown_SkColorType;
    }
    SkAlphaType onAlphaType() const override {
        return fInfo.alphaType();
    }

    SkIRect onGetSubset() const override {
        return SkIRect::MakeXYWH(fOrigin.fX, fOrigin.fY, fInfo.width(), fInfo.height());
    }

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> asTextureProxyRef(GrContext*,
                                            const GrSamplerState&, SkColorSpace*,
                                            sk_sp<SkColorSpace>*,
                                            SkScalar scaleAdjust[2]) const override;
#endif
    sk_sp<SkData> onRefEncoded() const override;
    sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;
    bool getROPixels(SkBitmap*, SkColorSpace* dstColorSpace, CachingHint) const override;
    bool onIsLazyGenerated() const override { return true; }
    sk_sp<SkImage> onMakeColorSpace(sk_sp<SkColorSpace>, SkColorType) const override;

    bool onIsValid(GrContext*) const override;

    SkImageCacherator* peekCacherator() const override {
        return const_cast<SkImage_Lazy*>(this);
    }

    // Only return true if the generated pixels have already been cached.
    bool lockAsBitmapOnlyIfAlreadyCached(SkBitmap*) const;
    // Call the underlying generator directly
    bool directGeneratePixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                              int srcX, int srcY) const;

    // SkImageCacherator interface
#if SK_SUPPORT_GPU
    // Returns the texture proxy. If the cacherator is generating the texture and wants to cache
    // it, it should use the passed in key (if the key is valid).
    sk_sp<GrTextureProxy> lockTextureProxy(GrContext*,
                                           const GrUniqueKey& key,
                                           SkImage::CachingHint,
                                           bool willBeMipped,
                                           SkColorSpace* dstColorSpace,
                                           GrTextureMaker::AllowedTexGenType genType) override;

    // Returns the color space of the texture that would be returned if you called lockTexture.
    // Separate code path to allow querying of the color space for textures that are cached (even
    // externally).
    sk_sp<SkColorSpace> getColorSpace(GrContext*, SkColorSpace* dstColorSpace) override;

    // TODO: Need to pass in dstColorSpace to fold into key here?
    void makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, GrUniqueKey* cacheKey) override;
#endif

private:
    class ScopedGenerator;

    /**
     *  On success (true), bitmap will point to the pixels for this generator. If this returns
     *  false, the bitmap will be reset to empty.
     *  TODO: Pass in dstColorSpace to ensure bitmap is compatible?
     */
    bool lockAsBitmap(SkBitmap*, SkImage::CachingHint, const SkImageInfo&) const;

    sk_sp<SharedGenerator> fSharedGenerator;
    // Note that fInfo is not necessarily the info from the generator. It may be cropped by
    // onMakeSubset and its color space may be changed by onMakeColorSpace.
    const SkImageInfo      fInfo;
    const SkIPoint         fOrigin;

    uint32_t fUniqueID;

    // Repeated calls to onMakeColorSpace will result in a proliferation of unique IDs and
    // SkImage_Lazy instances. Cache the result of the last successful onMakeColorSpace call.
    mutable SkMutex             fOnMakeColorSpaceMutex;
    mutable sk_sp<SkColorSpace> fOnMakeColorSpaceTarget;
    mutable sk_sp<SkImage>      fOnMakeColorSpaceResult;

    typedef SkImage_Base INHERITED;
};

///////////////////////////////////////////////////////////////////////////////

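// The Validator does the up-front work of constructing a lazy image: it rejects a missing or
// empty generator, clamps an optional subset to the generator's bounds, applies an optional
// color space override, and decides which unique ID the resulting image (and its cache entries)
// will use.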
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
      : fSharedGenerator(gen)
      , fAutoAquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive               fAutoAquire;
};
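
// Typical use, as in directGeneratePixels() below: construct a ScopedGenerator on the stack to
// take the shared mutex, then talk to the generator through it for the rest of the scope, e.g.
//
//     ScopedGenerator generator(fSharedGenerator);
//     generator->getPixels(info, pixels, rowBytes);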

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
    SkASSERT(bitmap.getGenerationID() == expectedID);
    SkASSERT(bitmap.isImmutable());
    SkASSERT(bitmap.getPixels());
    return true;
}

bool SkImage_Lazy::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
                                        int srcX, int srcY) const {
    ScopedGenerator generator(fSharedGenerator);
    const SkImageInfo& genInfo = generator->getInfo();
    // Currently generators do not natively handle subsets, so check that first.
    if (srcX || srcY || genInfo.width() != info.width() || genInfo.height() != info.height()) {
        return false;
    }

    return generator->getPixels(info, pixels, rb);
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap) const {
    return SkBitmapCache::Find(SkBitmapCacheDesc::Make(fUniqueID,
                                                       fInfo.width(), fInfo.height()), bitmap) &&
           check_output_bitmap(*bitmap, fUniqueID);
}

static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to make a temp allocation for the full image, and
    // then copy the subset into their allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}
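
// For example (sizes illustrative only): if the generator is 512x512 and the caller asks for the
// 100x100 window at (10,10), srcR != dstR, so the full 512x512 image is decoded into `full` and
// the requested window is then copied out via readPixels().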

bool SkImage_Lazy::lockAsBitmap(SkBitmap* bitmap, SkImage::CachingHint chint,
                                const SkImageInfo& info) const {
    // TODO: Verify dstColorSpace here
    if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap)) {
        return true;
    }

    SkBitmap tmpBitmap;
    SkBitmapCache::RecPtr cacheRec;
    SkPixmap pmap;
    if (SkImage::kAllow_CachingHint == chint) {
        auto desc = SkBitmapCacheDesc::Make(fUniqueID, info.width(), info.height());
        cacheRec = SkBitmapCache::Alloc(desc, info, &pmap);
        if (!cacheRec) {
            return false;
        }
    } else {
        if (!tmpBitmap.tryAllocPixels(info)) {
            return false;
        }
        if (!tmpBitmap.peekPixels(&pmap)) {
            return false;
        }
    }

    ScopedGenerator generator(fSharedGenerator);
    if (!generate_pixels(generator, pmap, fOrigin.x(), fOrigin.y())) {
        return false;
    }

    if (cacheRec) {
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        SkASSERT(bitmap->getPixels());  // we're locked
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getGenerationID() == fUniqueID);
        this->notifyAddedToRasterCache();
    } else {
        *bitmap = tmpBitmap;
        bitmap->pixelRef()->setImmutableWithID(fUniqueID);
    }

    check_output_bitmap(*bitmap, fUniqueID);
    return true;
}
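
// Note on the two allocation branches above: with SkImage::kAllow_CachingHint the decoded pixels
// are installed in the shared SkBitmapCache under fUniqueID and notifyAddedToRasterCache() is
// called on the image; with kDisallow_CachingHint the pixels live only in the caller's bitmap.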

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkColorSpace* dstColorSpace = dstInfo.colorSpace();
    SkBitmap bm;
    if (kDisallow_CachingHint == chint) {
        if (this->lockAsBitmapOnlyIfAlreadyCached(&bm)) {
            return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
        } else {
            // Try passing the caller's buffer directly down to the generator. If this fails we
            // may still succeed in the general case, as the generator may prefer some other
            // config, which we could then convert via SkBitmap::readPixels.
            if (this->directGeneratePixels(dstInfo, dstPixels, dstRB, srcX, srcY)) {
                return true;
            }
            // else fall through
        }
    }

    if (this->getROPixels(&bm, dstColorSpace, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}
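
// Illustrative caller-side sketch: a plain readPixels() on a lazy image lands here and, with the
// default kAllow_CachingHint, leaves the decoded bitmap in the raster cache for the next read:
//
//     SkImageInfo dstInfo = SkImageInfo::MakeN32Premul(image->width(), image->height());
//     std::vector<uint32_t> storage(image->width() * image->height());
//     image->readPixels(dstInfo, storage.data(), dstInfo.minRowBytes(), 0, 0);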

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkColorSpace* dstColorSpace,
                               CachingHint chint) const {
    return this->lockAsBitmap(bitmap, chint, fInfo);
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkColorSpace* dstColorSpace,
                                                      sk_sp<SkColorSpace>* texColorSpace,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, dstColorSpace, texColorSpace, scaleAdjust);
}
#endif

sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target,
                                              SkColorType targetColorType) const {
    SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
    if (target && fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}
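
// Illustrative usage (assuming `encoded` is an sk_sp<SkData> holding an encoded image and that
// SkImageGenerator::MakeFromEncoded is available): decoding is deferred until the image is first
// drawn or read.
//
//     sk_sp<SkImage> lazy = SkImage::MakeFromGenerator(SkImageGenerator::MakeFromEncoded(encoded));
//     // Or restrict the lazy image to a sub-rect of the generator's output:
//     SkIRect subset = SkIRect::MakeWH(16, 16);
//     sk_sp<SkImage> lazySubset = SkImage::MakeFromGenerator(
//             SkImageGenerator::MakeFromEncoded(encoded), &subset);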

//////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Implementation of SkImageCacherator interface, as needed by GrImageTextureMaker
 */

#if SK_SUPPORT_GPU

void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, GrUniqueKey* cacheKey) {
    // TODO: Take dstColorSpace, include hash in key
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

class Generator_GrYUVProvider : public GrYUVProvider {
    SkImageGenerator* fGen;

public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

    uint32_t onGetID() override { return fGen->uniqueID(); }
    bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUV8(sizeInfo, colorSpace);
    }
    bool onGetYUV8Planes(const SkYUVSizeInfo& sizeInfo, void* planes[3]) override {
        return fGen->getYUV8Planes(sizeInfo, planes);
    }
};

static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        SkASSERT(proxy->origin() == kTopLeft_GrSurfaceOrigin);
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

sk_sp<SkColorSpace> SkImage_Lazy::getColorSpace(GrContext* ctx, SkColorSpace* dstColorSpace) {
    // TODO: Is this ever needed? Is the output of this function going to be:
    // return dstColorSpace ? fInfo.refColorSpace() : dstColorSpace;

    // In legacy mode, we do no modification to the image's color space or encoding.
    // Subsequent legacy drawing is likely to ignore the color space, but some clients
    // may want to know what space the image data is in, so return it.
    return fInfo.refColorSpace();
}

/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(GrContext* ctx,
                                                     const GrUniqueKey& origKey,
                                                     SkImage::CachingHint chint,
                                                     bool willBeMipped,
                                                     SkColorSpace* dstColorSpace,
                                                     GrTextureMaker::AllowedTexGenType genType) {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // TODO: This needs to include the dstColorSpace.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // What format are we going to ask the generator to create?
    // TODO: Based on the dstColorSpace?
    const SkImageInfo cacheInfo = fInfo;

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, cacheInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(cacheInfo);
        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this,
        // apply a color space conversion from the generator's color space to this image's color
        // space. Note that we can only do this conversion (on the GPU) if both color spaces are
        // XYZ type.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        if ((!generatorColorSpace || generatorColorSpace->toXYZD50()) &&
            (!thisColorSpace || thisColorSpace->toXYZD50())) {
            // TODO: Update to create the mipped surface in the YUV generator and draw the base
            //       layer directly into the mipped surface.
            proxy = provider.refAsTextureProxy(ctx, desc, generatorColorSpace, thisColorSpace);
            if (proxy) {
                SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                         kLockTexturePathCount);
                set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
                return proxy;
            }
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->lockAsBitmap(&bitmap, chint, cacheInfo)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped,
        // generated a native non-mipped proxy, or generated a non-mipped YUV proxy. Thus we
        // generate a new mipped surface and copy the original proxy into the base layer. We will
        // then let the GPU generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non-mipped proxy; see skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif