/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Lazy.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkCachedData.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrProxyProvider.h"
#include "GrResourceKey.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple (SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex fMutex;
};

///////////////////////////////////////////////////////////////////////////////

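// Validates the generator, optional subset, and optional color space up front, computing the
// resulting image info, origin, and unique ID. On any failure, fSharedGenerator is reset so the
// Validator evaluates to false and no SkImage_Lazy is constructed.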
SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // We need a different uniqueID since we really are a subset of the raw generator.
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
        : fSharedGenerator(gen)
        , fAutoAquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive fAutoAquire;
};

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    fUniqueID = validator->fUniqueID;
}

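// On destruction, flush any pending unique-key invalidation messages so that GPU textures cached
// under this image's keys can be purged.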
SkImage_Lazy::~SkImage_Lazy() {
#if SK_SUPPORT_GPU
    for (int i = 0; i < fUniqueKeyInvalidatedMessages.count(); ++i) {
        SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(*fUniqueKeyInvalidatedMessages[i]);
    }
    fUniqueKeyInvalidatedMessages.deleteAll();
#endif
}

//////////////////////////////////////////////////////////////////////////////////////////////////

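// Decodes from the generator into pmap. When the requested rect is a strict subset of the
// generator's bounds, the full image is decoded into a temporary bitmap first and the subset is
// then copied into the caller's pixmap.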
static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to have a temp allocation for the full image, and
    // then copy the subset into their allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

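// Produces an immutable bitmap of decoded pixels. With kAllow_CachingHint the result is added to
// the shared SkBitmapCache (and found there on later calls); otherwise the pixels are decoded into
// an allocation owned solely by the caller's bitmap.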
bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkImage::CachingHint chint) const {
    auto check_output_bitmap = [bitmap]() {
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getPixels());
        (void)bitmap;
    };

    auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, bitmap)) {
        check_output_bitmap();
        return true;
    }

    if (SkImage::kAllow_CachingHint == chint) {
        SkPixmap pmap;
        SkBitmapCache::RecPtr cacheRec = SkBitmapCache::Alloc(desc, fInfo, &pmap);
        if (!cacheRec ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), pmap,
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        this->notifyAddedToRasterCache();
    } else {
        if (!bitmap->tryAllocPixels(fInfo) ||
            !generate_pixels(ScopedGenerator(fSharedGenerator), bitmap->pixmap(),
                             fOrigin.x(), fOrigin.y())) {
            return false;
        }
        bitmap->setImmutable();
    }

    check_output_bitmap();
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkBitmap bm;
    if (this->getROPixels(&bm, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
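// Returns a texture proxy for this image, suitable for the given sampler state. The work is
// delegated to GrImageTextureMaker; lockTextureProxy() below implements the underlying
// texture-generation paths.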
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, scaleAdjust);
}
#endif

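// Subsetting stays lazy: the same shared generator is reused, and only the origin and extent of
// the new image change (via a Validator built from the offset subset).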
sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

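// Color-space conversion is also lazy. The last target/result pair is memoized under a mutex so
// repeated conversions to the same color space return the cached image.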
sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target) const {
    SkAutoExclusive autoAquire(fOnMakeColorSpaceMutex);
    if (fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU

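// Wraps the caller-supplied key in an image-specific key domain. If origKey is invalid, cacheKey
// is left invalid, which disables unique-key caching for the resulting texture.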
void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey,
                                           GrUniqueKey* cacheKey) const {
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 0, "Image");
    }
}

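// Adapts an SkImageGenerator to the GrYUVProvider interface so the GPU backend can query and
// fetch YUVA planes directly from the generator.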
class Generator_GrYUVProvider : public GrYUVProvider {
public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

private:
    uint32_t onGetID() const override { return fGen->uniqueID(); }
    bool onQueryYUVA8(SkYUVASizeInfo* sizeInfo,
                      SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                      SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUVA8(sizeInfo, yuvaIndices, colorSpace);
    }
    bool onGetYUVA8Planes(const SkYUVASizeInfo& sizeInfo,
                          const SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                          void* planes[]) override {
        return fGen->getYUVA8Planes(sizeInfo, yuvaIndices, planes);
    }

    SkImageGenerator* fGen;

    typedef GrYUVProvider INHERITED;
};

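// Assigns the unique key to the given proxy. If an older, non-mipped proxy already owns the key
// (we are replacing it with a mipped version), the key is first removed from that proxy.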
static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

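// Fetches the YUVA plane data (and plane layout) from the generator, returning the backing
// SkCachedData, or nullptr if the generator cannot supply planes.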
sk_sp<SkCachedData> SkImage_Lazy::getPlanes(SkYUVASizeInfo* yuvaSizeInfo,
                                            SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount],
                                            SkYUVColorSpace* yuvColorSpace,
                                            const void* planes[SkYUVASizeInfo::kMaxCount]) {
    ScopedGenerator generator(fSharedGenerator);
    Generator_GrYUVProvider provider(generator);

    sk_sp<SkCachedData> data = provider.getPlanes(yuvaSizeInfo, yuvaIndices, yuvColorSpace, planes);
    if (!data) {
        return nullptr;
    }

    return data;
}

/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(
        GrContext* ctx,
        const GrUniqueKey& origKey,
        SkImage::CachingHint chint,
        bool willBeMipped,
        GrTextureMaker::AllowedTexGenType genType) const {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath, // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Build our texture key.
    // Even though some proxies created here may have a specific origin and use that origin, we do
    // not include that in the key. Since SkImages are meant to be immutable, a given SkImage will
    // always have an associated proxy that is always one origin or the other. It never can change
    // origins. Thus we don't need to include that info in the key itself.
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, fInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                *fUniqueKeyInvalidatedMessages.append() =
                        new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(fInfo);

        SkColorType colorType = fInfo.colorType();
        GrBackendFormat format =
                ctx->contextPriv().caps()->getBackendFormatFromColorType(colorType);

        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this,
        // apply a color space conversion from the generator's color space to this image's
        // color space.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        // TODO: Update to create the mipped surface in the YUV generator and draw the base
        // layer directly into the mipped surface.
        proxy = provider.refAsTextureProxy(ctx, format, desc, generatorColorSpace, thisColorSpace);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->getROPixels(&bitmap, chint)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            *fUniqueKeyInvalidatedMessages.append() =
                    new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped,
        // generated a native non-mipped proxy, or generated a non-mipped yuv proxy. Thus we
        // generate a new mipped surface and copy the original proxy into the base layer. We will
        // then let the gpu generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        *fUniqueKeyInvalidatedMessages.append() =
                new GrUniqueKeyInvalidatedMessage(key, ctx->uniqueID());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non-mipped proxy; see skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif