/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/image/SkImage_Lazy.h"

#include "include/core/SkBitmap.h"
#include "include/core/SkData.h"
#include "include/core/SkImageGenerator.h"
#include "src/core/SkBitmapCache.h"
#include "src/core/SkCachedData.h"
#include "src/core/SkImagePriv.h"
#include "src/core/SkNextID.h"

#if SK_SUPPORT_GPU
#include "include/gpu/GrSamplerState.h"
#include "include/private/GrRecordingContext.h"
#include "include/private/GrResourceKey.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrImageTextureMaker.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrYUVProvider.h"
#include "src/gpu/SkGr.h"
#endif

// Ref-counted tuple(SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
        : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex fMutex;
};
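
// A minimal sketch of the sharing pattern this class enables ('gen' is a placeholder for any
// SkImageGenerator): every SkImage_Lazy built from the same SharedGenerator holds a ref, so the
// generator is destroyed only after the last image goes away, and all decodes serialize on fMutex.
//
//     sk_sp<SharedGenerator> shared = SharedGenerator::Make(std::move(gen));
//     // 'shared' is null if 'gen' was null; otherwise N images can share it safely.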

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   const SkColorType* colorType, sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // we need a different uniqueID since we really are a subset of the raw generator
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorType || colorSpace) {
        if (colorType) {
            fInfo = fInfo.makeColorType(*colorType);
        }
        if (colorSpace) {
            fInfo = fInfo.makeColorSpace(colorSpace);
        }
        fUniqueID = SkNextID::ImageID();
    }
}
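
// In short: the validator keeps the generator's own uniqueID() only for the untouched,
// full-bounds case. A strict subset or a colorType/colorSpace override gets a fresh
// SkNextID::ImageID(), since the resulting image no longer represents the raw generator verbatim.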

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
        : fSharedGenerator(gen)
        , fAutoAcquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoMutexExclusive fAutoAcquire;
};
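
// Sketch of the intended access pattern (mirroring what onRefEncoded() below does; assumes the
// caller holds a sk_sp<SharedGenerator>):
//
//     {
//         ScopedGenerator generator(fSharedGenerator);  // acquires fMutex
//         generator->refEncodedData();                  // exclusive access to the generator
//     }                                                 // fMutex released here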

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo, validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

SkImage_Lazy::~SkImage_Lazy() {
#if SK_SUPPORT_GPU
    for (int i = 0; i < fUniqueKeyInvalidatedMessages.count(); ++i) {
        SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(*fUniqueKeyInvalidatedMessages[i]);
    }
    fUniqueKeyInvalidatedMessages.deleteAll();
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////

static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to have a temp allocation for the full image, and
    // then copy the subset into their allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}
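
// Worked example of the subset path above: a 100x100 generator asked to fill a 20x20 pmap at
// origin (10,10) first decodes the full 100x100 into a temporary bitmap, then copies the
// requested 20x20 window out with SkBitmap::readPixels(). The fast path (srcR == dstR) decodes
// straight into the caller's pixmap.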

bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkImage::CachingHint chint) const {
    auto check_output_bitmap = [bitmap]() {
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getPixels());
        (void)bitmap;
    };

    auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, bitmap)) {
        check_output_bitmap();
        return true;
    }

    if (SkImage::kAllow_CachingHint == chint) {
        SkPixmap pmap;
        SkBitmapCache::RecPtr cacheRec = SkBitmapCache::Alloc(desc, this->imageInfo(), &pmap);
        if (!cacheRec ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), pmap,
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        this->notifyAddedToRasterCache();
    } else {
        if (!bitmap->tryAllocPixels(this->imageInfo()) ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), bitmap->pixmap(), fOrigin.x(),
                             fOrigin.y())) {
            return false;
        }
        bitmap->setImmutable();
    }

    check_output_bitmap();
    return true;
}
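
// Rough sketch of how the hint above surfaces through the public API ('image' is a placeholder
// for any lazily-decoded SkImage; sizes are arbitrary): with kAllow_CachingHint the decode lands
// in SkBitmapCache so later reads are cheap, while with kDisallow_CachingHint the decode goes
// into a temporary allocation that is never added to the cache.
//
//     SkImageInfo info = SkImageInfo::MakeN32Premul(image->width(), image->height());
//     std::vector<uint8_t> storage(info.computeMinByteSize());
//     image->readPixels(info, storage.data(), info.minRowBytes(), 0, 0,
//                       SkImage::kDisallow_CachingHint);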

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkBitmap bm;
    if (this->getROPixels(&bm, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrRecordingContext* context,
                                                      const GrSamplerState& params,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, scaleAdjust);
}
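
// A typical route into the code above, as a sketch ('grContext' and 'lazyImage' are
// placeholders): drawing a lazy image into a GPU-backed surface is what ultimately requests a
// texture proxy, via GrImageTextureMaker and, on a cache miss, lockTextureProxy() below.
//
//     sk_sp<SkSurface> surface = SkSurface::MakeRenderTarget(
//             grContext, SkBudgeted::kYes, SkImageInfo::MakeN32Premul(256, 256));
//     surface->getCanvas()->drawImage(lazyImage, 0, 0);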
#endif

sk_sp<SkImage> SkImage_Lazy::onMakeSubset(GrRecordingContext* context,
                                          const SkIRect& subset) const {
    SkASSERT(this->bounds().contains(subset));
    SkASSERT(this->bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    const SkColorType colorType = this->colorType();
    Validator validator(fSharedGenerator, &generatorSubset, &colorType, this->refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

sk_sp<SkImage> SkImage_Lazy::onMakeColorTypeAndColorSpace(GrRecordingContext*,
                                                          SkColorType targetCT,
                                                          sk_sp<SkColorSpace> targetCS) const {
    SkAutoMutexExclusive autoAcquire(fOnMakeColorTypeAndSpaceMutex);
    if (fOnMakeColorTypeAndSpaceResult &&
        targetCT == fOnMakeColorTypeAndSpaceResult->colorType() &&
        SkColorSpace::Equals(targetCS.get(), fOnMakeColorTypeAndSpaceResult->colorSpace())) {
        return fOnMakeColorTypeAndSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), this->width(), this->height());
    Validator validator(fSharedGenerator, &generatorSubset, &targetCT, targetCS);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorTypeAndSpaceResult = result;
    }
    return result;
}
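
// Sketch of the memoization above from the caller's side ('lazyImage' is a placeholder, and this
// assumes SkImage::makeColorSpace() routes through onMakeColorTypeAndColorSpace() with the
// image's current color type): asking for the same target twice is expected to return the cached
// result rather than wrapping the generator in a second SkImage_Lazy.
//
//     sk_sp<SkImage> a = lazyImage->makeColorSpace(SkColorSpace::MakeSRGB());
//     sk_sp<SkImage> b = lazyImage->makeColorSpace(SkColorSpace::MakeSRGB());
//     // 'a' and 'b' should be the same image (matching uniqueID()).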

sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator
            validator(SharedGenerator::Make(std::move(generator)), subset, nullptr, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}
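
// A minimal usage sketch for the factory above ("photo.png" is a placeholder path): the encoded
// data is wrapped in a generator, and no pixels are decoded until the image is first drawn or
// read.
//
//     sk_sp<SkData> data = SkData::MakeFromFileName("photo.png");
//     auto gen = SkImageGenerator::MakeFromEncoded(std::move(data));
//     sk_sp<SkImage> image = SkImage::MakeFromGenerator(std::move(gen));
//     // Pass a non-null 'subset' to wrap only a window of the generator's bounds.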

//////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU

void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey,
                                           GrUniqueKey* cacheKey) const {
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

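// Adapts an SkImageGenerator to the GrYUVProvider interface so the GPU upload path can ask the
// generator for YUVA plane sizes and plane data directly, without first converting to RGBA.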
class Generator_GrYUVProvider : public GrYUVProvider {
public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

private:
    uint32_t onGetID() const override { return fGen->uniqueID(); }
    bool onQueryYUVA8(SkYUVASizeInfo* sizeInfo,
                      SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                      SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUVA8(sizeInfo, yuvaIndices, colorSpace);
    }
    bool onGetYUVA8Planes(const SkYUVASizeInfo& sizeInfo,
                          const SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                          void* planes[]) override {
        return fGen->getYUVA8Planes(sizeInfo, yuvaIndices, planes);
    }

    SkImageGenerator* fGen;

    typedef GrYUVProvider INHERITED;
};

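// Associates 'key' with 'proxy' in the proxy provider's cache. If a proxy without mip levels
// already owned this key (originalProxy), the key is first removed from it so the new mipped
// proxy can take its place.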
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            SkASSERT(originalProxy->getUniqueKey() == key);
            proxyProvider->removeUniqueKeyFromProxy(originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

sk_sp<SkCachedData> SkImage_Lazy::getPlanes(SkYUVASizeInfo* yuvaSizeInfo,
                                            SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                                            SkYUVColorSpace* yuvColorSpace,
                                            const void* planes[SkYUVASizeInfo::kMaxCount]) {
    ScopedGenerator generator(fSharedGenerator);
    Generator_GrYUVProvider provider(generator);

    sk_sp<SkCachedData> data = provider.getPlanes(yuvaSizeInfo, yuvaIndices, yuvColorSpace, planes);
    if (!data) {
        return nullptr;
    }

    return data;
}

/*
 * We have 4 ways to try to return a texture (in sorted order)
 *
 * 1. Check the cache for a pre-existing one
 * 2. Ask the generator to natively create one
 * 3. Ask the generator to return YUV planes, which the GPU can convert
 * 4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(
        GrRecordingContext* ctx,
        const GrUniqueKey& origKey,
        SkImage::CachingHint chint,
        bool willBeMipped,
        GrTextureMaker::AllowedTexGenType genType) const {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // Even though some proxies created here may have a specific origin and use that origin, we do
    // not include that in the key. Since SkImages are meant to be immutable, a given SkImage will
    // always have an associated proxy that is always one origin or the other. It never can change
    // origins. Thus we don't need to include that info in the key itself.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->priv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, this->imageInfo(), fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                *fUniqueKeyInvalidatedMessages.append() =
                        new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->priv().options().fDisableGpuYUVConversion) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(this->imageInfo());

        SkColorType colorType = this->colorType();
        GrBackendFormat format =
                ctx->priv().caps()->getBackendFormatFromColorType(colorType);

        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space.
        // If onMakeColorTypeAndColorSpace has been called then this will not match this image's
        // color space. To correct this, apply a color space conversion from the generator's color
        // space to this image's color space.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = this->colorSpace();

        // TODO: Update to create the mipped surface in the YUV generator and draw the base
        //       layer directly into the mipped surface.
        proxy = provider.refAsTextureProxy(ctx, format, desc, SkColorTypeToGrColorType(colorType),
                                           generatorColorSpace, thisColorSpace);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
            return proxy;
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->getROPixels(&bitmap, chint)) {
        proxy = proxyProvider->createProxyFromBitmap(bitmap, willBeMipped ? GrMipMapped::kYes
                                                                          : GrMipMapped::kNo);
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped, generated
        // a native non mipped proxy, or generated a non-mipped yuv proxy. Thus we generate a new
        // mipped surface and copy the original proxy into the base layer. We will then let the gpu
        // generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        *fUniqueKeyInvalidatedMessages.append() =
                new GrUniqueKeyInvalidatedMessage(key, ctx->priv().contextID());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non mipped proxy; see skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif