/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Base.h"
#include "SkImageCacherator.h"

#include "SkBitmap.h"
#include "SkBitmapCache.h"
#include "SkData.h"
#include "SkImageGenerator.h"
#include "SkImagePriv.h"
#include "SkNextID.h"
#include "SkPixelRef.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrGpuResourcePriv.h"
#include "GrImageTextureMaker.h"
#include "GrResourceKey.h"
#include "GrProxyProvider.h"
#include "GrSamplerState.h"
#include "GrYUVProvider.h"
#include "SkGr.h"
#endif

// Ref-counted tuple (SkImageGenerator, SkMutex) which allows sharing one generator among N images
class SharedGenerator final : public SkNVRefCnt<SharedGenerator> {
public:
    static sk_sp<SharedGenerator> Make(std::unique_ptr<SkImageGenerator> gen) {
        return gen ? sk_sp<SharedGenerator>(new SharedGenerator(std::move(gen))) : nullptr;
    }

    // This is thread-safe. It is a const field set in the constructor.
    const SkImageInfo& getInfo() { return fGenerator->getInfo(); }

private:
    explicit SharedGenerator(std::unique_ptr<SkImageGenerator> gen)
            : fGenerator(std::move(gen)) {
        SkASSERT(fGenerator);
    }

    friend class ScopedGenerator;
    friend class SkImage_Lazy;

    std::unique_ptr<SkImageGenerator> fGenerator;
    SkMutex                           fMutex;
};
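
// Illustrative usage sketch (comment only, not part of the implementation): multiple lazy
// images can share one SharedGenerator; all decode calls are serialized through fMutex.
//
//     sk_sp<SharedGenerator> shared = SharedGenerator::Make(std::move(generator));
//     if (shared) {
//         const SkImageInfo& info = shared->getInfo();  // safe without the lock: const data
//     }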

class SkImage_Lazy : public SkImage_Base, public SkImageCacherator {
public:
    struct Validator {
        Validator(sk_sp<SharedGenerator>, const SkIRect* subset, sk_sp<SkColorSpace> colorSpace);

        operator bool() const { return fSharedGenerator.get(); }

        sk_sp<SharedGenerator> fSharedGenerator;
        SkImageInfo            fInfo;
        SkIPoint               fOrigin;
        sk_sp<SkColorSpace>    fColorSpace;
        uint32_t               fUniqueID;
    };

    SkImage_Lazy(Validator* validator);

    SkImageInfo onImageInfo() const override {
        return fInfo;
    }
    SkColorType onColorType() const override {
        return kUnknown_SkColorType;
    }
    SkAlphaType onAlphaType() const override {
        return fInfo.alphaType();
    }

    SkIRect onGetSubset() const override {
        return SkIRect::MakeXYWH(fOrigin.fX, fOrigin.fY, fInfo.width(), fInfo.height());
    }

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> asTextureProxyRef(GrContext*,
                                            const GrSamplerState&, SkColorSpace*,
                                            sk_sp<SkColorSpace>*,
                                            SkScalar scaleAdjust[2]) const override;
#endif
    sk_sp<SkData> onRefEncoded() const override;
    sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;
    bool getROPixels(SkBitmap*, SkColorSpace* dstColorSpace, CachingHint) const override;
    bool onIsLazyGenerated() const override { return true; }
    bool onCanLazyGenerateOnGPU() const override;
    sk_sp<SkImage> onMakeColorSpace(sk_sp<SkColorSpace>, SkColorType) const override;

    bool onIsValid(GrContext*) const override;

    SkImageCacherator* peekCacherator() const override {
        return const_cast<SkImage_Lazy*>(this);
    }

    // Only return true if the generator's output has already been cached.
    bool lockAsBitmapOnlyIfAlreadyCached(SkBitmap*, CachedFormat) const;
    // Call the underlying generator directly
    bool directGeneratePixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                              int srcX, int srcY) const;

    // SkImageCacherator interface
#if SK_SUPPORT_GPU
    // Returns the texture proxy. If the cacherator is generating the texture and wants to cache it,
    // it should use the passed-in key (if the key is valid).
    sk_sp<GrTextureProxy> lockTextureProxy(GrContext*,
                                           const GrUniqueKey& key,
                                           SkImage::CachingHint,
                                           bool willBeMipped,
                                           SkColorSpace* dstColorSpace,
                                           GrTextureMaker::AllowedTexGenType genType) override;

    // Returns the color space of the texture that would be returned if you called lockTexture.
    // Separate code path to allow querying of the color space for textures that are cached (even
    // externally).
    sk_sp<SkColorSpace> getColorSpace(GrContext*, SkColorSpace* dstColorSpace) override;
    void makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, CachedFormat,
                                 GrUniqueKey* cacheKey) override;
#endif

    SkImageInfo buildCacheInfo(CachedFormat) const override;

private:
    class ScopedGenerator;

    /**
     *  On success (true), bitmap will point to the pixels for this generator. If this returns
     *  false, the bitmap will be reset to empty.
     */
    bool lockAsBitmap(SkBitmap*, SkImage::CachingHint, CachedFormat, const SkImageInfo&) const;

    sk_sp<SharedGenerator> fSharedGenerator;
    // Note that fInfo is not necessarily the info from the generator. It may be cropped by
    // onMakeSubset and its color space may be changed by onMakeColorSpace.
    const SkImageInfo      fInfo;
    const SkIPoint         fOrigin;

    struct IDRec {
        SkOnce   fOnce;
        uint32_t fUniqueID;
    };
    mutable IDRec fIDRecs[kNumCachedFormats];

    uint32_t getUniqueID(CachedFormat) const;

    // Repeated calls to onMakeColorSpace will result in a proliferation of unique IDs and
    // SkImage_Lazy instances. Cache the result of the last successful onMakeColorSpace call.
    mutable SkMutex             fOnMakeColorSpaceMutex;
    mutable sk_sp<SkColorSpace> fOnMakeColorSpaceTarget;
    mutable sk_sp<SkImage>      fOnMakeColorSpaceResult;

    typedef SkImage_Base INHERITED;
};

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::Validator::Validator(sk_sp<SharedGenerator> gen, const SkIRect* subset,
                                   sk_sp<SkColorSpace> colorSpace)
        : fSharedGenerator(std::move(gen)) {
    if (!fSharedGenerator) {
        return;
    }

    // The following generator accessors are safe without acquiring the mutex (const getters).
    // TODO: refactor to use a ScopedGenerator instead, for clarity.
    const SkImageInfo& info = fSharedGenerator->fGenerator->getInfo();
    if (info.isEmpty()) {
        fSharedGenerator.reset();
        return;
    }

    fUniqueID = fSharedGenerator->fGenerator->uniqueID();
    const SkIRect bounds = SkIRect::MakeWH(info.width(), info.height());
    if (subset) {
        if (!bounds.contains(*subset)) {
            fSharedGenerator.reset();
            return;
        }
        if (*subset != bounds) {
            // We need a different uniqueID since we really are a subset of the raw generator.
            fUniqueID = SkNextID::ImageID();
        }
    } else {
        subset = &bounds;
    }

    fInfo = info.makeWH(subset->width(), subset->height());
    fOrigin = SkIPoint::Make(subset->x(), subset->y());
    if (colorSpace) {
        fInfo = fInfo.makeColorSpace(colorSpace);
        fUniqueID = SkNextID::ImageID();
    }
}

///////////////////////////////////////////////////////////////////////////////

// Helper for exclusive access to a shared generator.
class SkImage_Lazy::ScopedGenerator {
public:
    ScopedGenerator(const sk_sp<SharedGenerator>& gen)
            : fSharedGenerator(gen)
            , fAutoAcquire(gen->fMutex) {}

    SkImageGenerator* operator->() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

    operator SkImageGenerator*() const {
        fSharedGenerator->fMutex.assertHeld();
        return fSharedGenerator->fGenerator.get();
    }

private:
    const sk_sp<SharedGenerator>& fSharedGenerator;
    SkAutoExclusive               fAutoAcquire;
};
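
// Illustrative sketch (comment only): ScopedGenerator holds the shared mutex for its lifetime,
// so every direct call into the generator happens under the lock.
//
//     {
//         ScopedGenerator generator(fSharedGenerator);
//         SkImageInfo info = generator->getInfo();  // mutex held here
//     }                                             // mutex released when the scope ends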

///////////////////////////////////////////////////////////////////////////////

SkImage_Lazy::SkImage_Lazy(Validator* validator)
        : INHERITED(validator->fInfo.width(), validator->fInfo.height(), validator->fUniqueID)
        , fSharedGenerator(std::move(validator->fSharedGenerator))
        , fInfo(validator->fInfo)
        , fOrigin(validator->fOrigin) {
    SkASSERT(fSharedGenerator);
    // We explicitly set the legacy format slot, but leave the others uninitialized (via SkOnce)
    // and only resolve them to IDs as needed (by calling getUniqueID()).
    fIDRecs[kLegacy_CachedFormat].fOnce([this, validator] {
        fIDRecs[kLegacy_CachedFormat].fUniqueID = validator->fUniqueID;
    });
}

uint32_t SkImage_Lazy::getUniqueID(CachedFormat format) const {
    IDRec* rec = &fIDRecs[format];
    rec->fOnce([rec] {
        rec->fUniqueID = SkNextID::ImageID();
    });
    return rec->fUniqueID;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

SkImageInfo SkImage_Lazy::buildCacheInfo(CachedFormat format) const {
    if (kGray_8_SkColorType == fInfo.colorType()) {
        return fInfo.makeColorSpace(nullptr);
    } else {
        return fInfo;
    }
}

//////////////////////////////////////////////////////////////////////////////////////////////////

static bool check_output_bitmap(const SkBitmap& bitmap, uint32_t expectedID) {
    SkASSERT(bitmap.getGenerationID() == expectedID);
    SkASSERT(bitmap.isImmutable());
    SkASSERT(bitmap.getPixels());
    return true;
}

bool SkImage_Lazy::directGeneratePixels(const SkImageInfo& info, void* pixels, size_t rb,
                                        int srcX, int srcY) const {
    ScopedGenerator generator(fSharedGenerator);
    const SkImageInfo& genInfo = generator->getInfo();
    // Currently generators do not natively handle subsets, so check that first.
    if (srcX || srcY || genInfo.width() != info.width() || genInfo.height() != info.height()) {
        return false;
    }

    return generator->getPixels(info, pixels, rb);
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::lockAsBitmapOnlyIfAlreadyCached(SkBitmap* bitmap, CachedFormat format) const {
    uint32_t uniqueID = this->getUniqueID(format);
    return SkBitmapCache::Find(SkBitmapCacheDesc::Make(uniqueID,
                                                       fInfo.width(), fInfo.height()), bitmap) &&
           check_output_bitmap(*bitmap, uniqueID);
}

static bool generate_pixels(SkImageGenerator* gen, const SkPixmap& pmap, int originX, int originY) {
    const int genW = gen->getInfo().width();
    const int genH = gen->getInfo().height();
    const SkIRect srcR = SkIRect::MakeWH(genW, genH);
    const SkIRect dstR = SkIRect::MakeXYWH(originX, originY, pmap.width(), pmap.height());
    if (!srcR.contains(dstR)) {
        return false;
    }

    // If they are requesting a subset, we have to have a temp allocation for the full image, and
    // then copy the subset into their allocation.
    SkBitmap full;
    SkPixmap fullPM;
    const SkPixmap* dstPM = &pmap;
    if (srcR != dstR) {
        if (!full.tryAllocPixels(pmap.info().makeWH(genW, genH))) {
            return false;
        }
        if (!full.peekPixels(&fullPM)) {
            return false;
        }
        dstPM = &fullPM;
    }

    if (!gen->getPixels(dstPM->info(), dstPM->writable_addr(), dstPM->rowBytes())) {
        return false;
    }

    if (srcR != dstR) {
        if (!full.readPixels(pmap, originX, originY)) {
            return false;
        }
    }
    return true;
}

bool SkImage_Lazy::lockAsBitmap(SkBitmap* bitmap, SkImage::CachingHint chint, CachedFormat format,
                                const SkImageInfo& info) const {
    if (this->lockAsBitmapOnlyIfAlreadyCached(bitmap, format)) {
        return true;
    }

    uint32_t uniqueID = this->getUniqueID(format);

    SkBitmap tmpBitmap;
    SkBitmapCache::RecPtr cacheRec;
    SkPixmap pmap;
    if (SkImage::kAllow_CachingHint == chint) {
        auto desc = SkBitmapCacheDesc::Make(uniqueID, info.width(), info.height());
        cacheRec = SkBitmapCache::Alloc(desc, info, &pmap);
        if (!cacheRec) {
            return false;
        }
    } else {
        if (!tmpBitmap.tryAllocPixels(info)) {
            return false;
        }
        if (!tmpBitmap.peekPixels(&pmap)) {
            return false;
        }
    }

    ScopedGenerator generator(fSharedGenerator);
    if (!generate_pixels(generator, pmap, fOrigin.x(), fOrigin.y())) {
        return false;
    }

    if (cacheRec) {
        SkBitmapCache::Add(std::move(cacheRec), bitmap);
        SkASSERT(bitmap->getPixels());  // we're locked
        SkASSERT(bitmap->isImmutable());
        SkASSERT(bitmap->getGenerationID() == uniqueID);
        this->notifyAddedToCache();
    } else {
        *bitmap = tmpBitmap;
        bitmap->pixelRef()->setImmutableWithID(uniqueID);
    }

    check_output_bitmap(*bitmap, uniqueID);
    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

bool SkImage_Lazy::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                int srcX, int srcY, CachingHint chint) const {
    SkColorSpace* dstColorSpace = dstInfo.colorSpace();
    SkBitmap bm;
    if (kDisallow_CachingHint == chint) {
        CachedFormat cacheFormat = kLegacy_CachedFormat;
        if (this->lockAsBitmapOnlyIfAlreadyCached(&bm, cacheFormat)) {
            return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
        } else {
            // Try passing the caller's buffer directly down to the generator. If this fails we
            // may still succeed in the general case, as the generator may prefer some other
            // config, which we could then convert via SkBitmap::readPixels.
            if (this->directGeneratePixels(dstInfo, dstPixels, dstRB, srcX, srcY)) {
                return true;
            }
            // else fall through
        }
    }

    if (this->getROPixels(&bm, dstColorSpace, chint)) {
        return bm.readPixels(dstInfo, dstPixels, dstRB, srcX, srcY);
    }
    return false;
}

sk_sp<SkData> SkImage_Lazy::onRefEncoded() const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->refEncodedData();
}

bool SkImage_Lazy::getROPixels(SkBitmap* bitmap, SkColorSpace* dstColorSpace,
                               CachingHint chint) const {
    CachedFormat cacheFormat = kLegacy_CachedFormat;
    const SkImageInfo cacheInfo = this->buildCacheInfo(cacheFormat);
    return this->lockAsBitmap(bitmap, chint, cacheFormat, cacheInfo);
}

bool SkImage_Lazy::onIsValid(GrContext* context) const {
    ScopedGenerator generator(fSharedGenerator);
    return generator->isValid(context);
}

bool SkImage_Lazy::onCanLazyGenerateOnGPU() const {
#if SK_SUPPORT_GPU
    ScopedGenerator generator(fSharedGenerator);
    return SkImageGenerator::TexGenType::kNone != generator->onCanGenerateTexture();
#else
    return false;
#endif
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#if SK_SUPPORT_GPU
sk_sp<GrTextureProxy> SkImage_Lazy::asTextureProxyRef(GrContext* context,
                                                      const GrSamplerState& params,
                                                      SkColorSpace* dstColorSpace,
                                                      sk_sp<SkColorSpace>* texColorSpace,
                                                      SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    GrImageTextureMaker textureMaker(context, this, kAllow_CachingHint);
    return textureMaker.refTextureProxyForParams(params, dstColorSpace, texColorSpace, scaleAdjust);
}
#endif

sk_sp<SkImage> SkImage_Lazy::onMakeSubset(const SkIRect& subset) const {
    SkASSERT(fInfo.bounds().contains(subset));
    SkASSERT(fInfo.bounds() != subset);

    const SkIRect generatorSubset = subset.makeOffset(fOrigin.x(), fOrigin.y());
    Validator validator(fSharedGenerator, &generatorSubset, fInfo.refColorSpace());
    return validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
}

sk_sp<SkImage> SkImage_Lazy::onMakeColorSpace(sk_sp<SkColorSpace> target,
                                              SkColorType targetColorType) const {
    SkAutoExclusive autoAcquire(fOnMakeColorSpaceMutex);
    if (target && fOnMakeColorSpaceTarget &&
        SkColorSpace::Equals(target.get(), fOnMakeColorSpaceTarget.get())) {
        return fOnMakeColorSpaceResult;
    }
    const SkIRect generatorSubset =
            SkIRect::MakeXYWH(fOrigin.x(), fOrigin.y(), fInfo.width(), fInfo.height());
    Validator validator(fSharedGenerator, &generatorSubset, target);
    sk_sp<SkImage> result = validator ? sk_sp<SkImage>(new SkImage_Lazy(&validator)) : nullptr;
    if (result) {
        fOnMakeColorSpaceTarget = target;
        fOnMakeColorSpaceResult = result;
    }
    return result;
}

sk_sp<SkImage> SkImage::MakeFromGenerator(std::unique_ptr<SkImageGenerator> generator,
                                          const SkIRect* subset) {
    SkImage_Lazy::Validator validator(SharedGenerator::Make(std::move(generator)), subset, nullptr);

    return validator ? sk_make_sp<SkImage_Lazy>(&validator) : nullptr;
}
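
// Illustrative usage sketch (comment only; the file name is hypothetical):
//
//     sk_sp<SkData> data = SkData::MakeFromFileName("example.png");
//     sk_sp<SkImage> image =
//             SkImage::MakeFromGenerator(SkImageGenerator::MakeFromEncoded(std::move(data)));
//     // No pixels are decoded yet; decoding happens lazily when the image is drawn or when
//     // readPixels() forces generation.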

//////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Implementation of SkImageCacherator interface, as needed by GrImageTextureMaker
 */

#if SK_SUPPORT_GPU

void SkImage_Lazy::makeCacheKeyFromOrigKey(const GrUniqueKey& origKey, CachedFormat format,
                                           GrUniqueKey* cacheKey) {
    SkASSERT(!cacheKey->isValid());
    if (origKey.isValid()) {
        static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
        GrUniqueKey::Builder builder(cacheKey, origKey, kDomain, 1, "Image");
        builder[0] = format;
    }
}

class Generator_GrYUVProvider : public GrYUVProvider {
    SkImageGenerator* fGen;

public:
    Generator_GrYUVProvider(SkImageGenerator* gen) : fGen(gen) {}

    uint32_t onGetID() override { return fGen->uniqueID(); }
    bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, SkYUVColorSpace* colorSpace) const override {
        return fGen->queryYUV8(sizeInfo, colorSpace);
    }
    bool onGetYUV8Planes(const SkYUVSizeInfo& sizeInfo, void* planes[3]) override {
        return fGen->getYUV8Planes(sizeInfo, planes);
    }
};

static void set_key_on_proxy(GrProxyProvider* proxyProvider,
                             GrTextureProxy* proxy, GrTextureProxy* originalProxy,
                             const GrUniqueKey& key) {
    if (key.isValid()) {
        SkASSERT(proxy->origin() == kTopLeft_GrSurfaceOrigin);
        if (originalProxy && originalProxy->getUniqueKey().isValid()) {
            SkASSERT(originalProxy->getUniqueKey() == key);
            SkASSERT(GrMipMapped::kYes == proxy->mipMapped() &&
                     GrMipMapped::kNo == originalProxy->mipMapped());
            // If we had an originalProxy with a valid key, that means there already is a proxy in
            // the cache which matches the key, but it does not have mip levels and we require them.
            // Thus we must remove the unique key from that proxy.
            proxyProvider->removeUniqueKeyFromProxy(key, originalProxy);
        }
        proxyProvider->assignUniqueKeyToProxy(key, proxy);
    }
}

sk_sp<SkColorSpace> SkImage_Lazy::getColorSpace(GrContext* ctx, SkColorSpace* dstColorSpace) {
    if (!dstColorSpace) {
        // In legacy mode, we do no modification to the image's color space or encoding.
        // Subsequent legacy drawing is likely to ignore the color space, but some clients
        // may want to know what space the image data is in, so return it.
        return fInfo.refColorSpace();
    } else {
        CachedFormat format = kLegacy_CachedFormat;
        SkImageInfo cacheInfo = this->buildCacheInfo(format);
        return cacheInfo.refColorSpace();
    }
}

/*
 *  We have 4 ways to try to return a texture (in sorted order)
 *
 *  1. Check the cache for a pre-existing one
 *  2. Ask the generator to natively create one
 *  3. Ask the generator to return YUV planes, which the GPU can convert
 *  4. Ask the generator to return RGB(A) data, which the GPU can convert
 */
sk_sp<GrTextureProxy> SkImage_Lazy::lockTextureProxy(GrContext* ctx,
                                                     const GrUniqueKey& origKey,
                                                     SkImage::CachingHint chint,
                                                     bool willBeMipped,
                                                     SkColorSpace* dstColorSpace,
                                                     GrTextureMaker::AllowedTexGenType genType) {
    // Values representing the various texture lock paths we can take. Used for logging the path
    // taken to a histogram.
    enum LockTexturePath {
        kFailure_LockTexturePath,
        kPreExisting_LockTexturePath,
        kNative_LockTexturePath,
        kCompressed_LockTexturePath,  // Deprecated
        kYUV_LockTexturePath,
        kRGBA_LockTexturePath,
    };

    enum { kLockTexturePathCount = kRGBA_LockTexturePath + 1 };

    // Determine which cached format we're going to use (which may involve decoding to a different
    // info than the generator provides).
    CachedFormat format = kLegacy_CachedFormat;

    // Fold the cache format into our texture key
    GrUniqueKey key;
    this->makeCacheKeyFromOrigKey(origKey, format, &key);

    GrProxyProvider* proxyProvider = ctx->contextPriv().proxyProvider();
    sk_sp<GrTextureProxy> proxy;

    // 1. Check the cache for a pre-existing one
    if (key.isValid()) {
        proxy = proxyProvider->findOrCreateProxyByUniqueKey(key, kTopLeft_GrSurfaceOrigin);
        if (proxy) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kPreExisting_LockTexturePath,
                                     kLockTexturePathCount);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // The CachedFormat is both an index for which cache "slot" we'll use to store this particular
    // decoded variant of the encoded data, and also a recipe for how to transform the original
    // info to get the one that we're going to decode to.
    const SkImageInfo cacheInfo = this->buildCacheInfo(format);

    // 2. Ask the generator to natively create one
    if (!proxy) {
        ScopedGenerator generator(fSharedGenerator);
        if (GrTextureMaker::AllowedTexGenType::kCheap == genType &&
            SkImageGenerator::TexGenType::kCheap != generator->onCanGenerateTexture()) {
            return nullptr;
        }
        if ((proxy = generator->generateTexture(ctx, cacheInfo, fOrigin, willBeMipped))) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kNative_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            if (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped()) {
                return proxy;
            }
        }
    }

    // 3. Ask the generator to return YUV planes, which the GPU can convert. If we will be mipping
    //    the texture we fall through here and have the CPU generate the mip maps for us.
    if (!proxy && !willBeMipped && !ctx->contextPriv().disableGpuYUVConversion()) {
        const GrSurfaceDesc desc = GrImageInfoToSurfaceDesc(cacheInfo);
        ScopedGenerator generator(fSharedGenerator);
        Generator_GrYUVProvider provider(generator);

        // The pixels in the texture will be in the generator's color space. If onMakeColorSpace
        // has been called then this will not match this image's color space. To correct this,
        // apply a color space conversion from the generator's color space to this image's color
        // space. Note that we can only do this conversion (on the GPU) if both color spaces are
        // XYZ type.
        SkColorSpace* generatorColorSpace = fSharedGenerator->fGenerator->getInfo().colorSpace();
        SkColorSpace* thisColorSpace = fInfo.colorSpace();

        if ((!generatorColorSpace || generatorColorSpace->toXYZD50()) &&
            (!thisColorSpace || thisColorSpace->toXYZD50())) {
            // TODO: Update to create the mipped surface in the YUV generator and draw the base
            //       layer directly into the mipped surface.
            proxy = provider.refAsTextureProxy(ctx, desc, generatorColorSpace, thisColorSpace);
            if (proxy) {
                SK_HISTOGRAM_ENUMERATION("LockTexturePath", kYUV_LockTexturePath,
                                         kLockTexturePathCount);
                set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
                return proxy;
            }
        }
    }

    // 4. Ask the generator to return RGB(A) data, which the GPU can convert
    SkBitmap bitmap;
    if (!proxy && this->lockAsBitmap(&bitmap, chint, format, cacheInfo)) {
        if (willBeMipped) {
            proxy = proxyProvider->createMipMapProxyFromBitmap(bitmap);
        }
        if (!proxy) {
            proxy = GrUploadBitmapToTextureProxy(proxyProvider, bitmap);
        }
        if (proxy && (!willBeMipped || GrMipMapped::kYes == proxy->mipMapped())) {
            SK_HISTOGRAM_ENUMERATION("LockTexturePath", kRGBA_LockTexturePath,
                                     kLockTexturePathCount);
            set_key_on_proxy(proxyProvider, proxy.get(), nullptr, key);
            return proxy;
        }
    }

    if (proxy) {
        // We need a mipped proxy, but we either found a proxy earlier that wasn't mipped,
        // generated a native non-mipped proxy, or generated a non-mipped YUV proxy. Thus we
        // generate a new mipped surface and copy the original proxy into the base layer. We will
        // then let the GPU generate the rest of the mips.
        SkASSERT(willBeMipped);
        SkASSERT(GrMipMapped::kNo == proxy->mipMapped());
        if (auto mippedProxy = GrCopyBaseMipMapToTextureProxy(ctx, proxy.get())) {
            set_key_on_proxy(proxyProvider, mippedProxy.get(), proxy.get(), key);
            return mippedProxy;
        }
        // We failed to make a mipped proxy with the base copied into it. This could have
        // been from failure to make the proxy or failure to do the copy. Thus we will fall
        // back to just using the non-mipped proxy; see skbug.com/7094.
        return proxy;
    }

    SK_HISTOGRAM_ENUMERATION("LockTexturePath", kFailure_LockTexturePath,
                             kLockTexturePathCount);
    return nullptr;
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#endif