/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCCClipPath.h"

#include "include/gpu/GrRenderTarget.h"
#include "include/gpu/GrTexture.h"
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/ccpr/GrCCPerFlushResources.h"

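// Records the device-space path and the rectangle of device space it will be accessed from, and
// creates a lazy proxy that stands in for the atlas texture until the atlas is actually rendered.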
void GrCCClipPath::init(
        const SkPath& deviceSpacePath, const SkIRect& accessRect,
        GrCCAtlas::CoverageType atlasCoverageType, const GrCaps& caps) {
    SkASSERT(!this->isInitialized());

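    // The lazy proxy's instantiation callback runs at flush time, after renderPathInAtlas() has
    // assigned this path a spot in an atlas. It fetches the atlas texture and computes the
    // transform from device space to normalized atlas texture coordinates.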
    fAtlasLazyProxy = GrCCAtlas::MakeLazyAtlasProxy(
            [this](GrResourceProvider* resourceProvider, GrPixelConfig,
                   const GrBackendFormat& format, int sampleCount) {
                SkASSERT(fHasAtlas);
                SkASSERT(!fHasAtlasTransform);

                GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;

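                // If the atlas never materialized, or its backing texture fails to instantiate,
                // zero out the transform and return no texture.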
                if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
                    fAtlasScale = fAtlasTranslate = {0, 0};
                    SkDEBUGCODE(fHasAtlasTransform = true);
                    return sk_sp<GrTexture>();
                }

                sk_sp<GrTexture> texture = sk_ref_sp(textureProxy->peekTexture());
                SkASSERT(texture);
                SkASSERT(texture->backendFormat() == format);
                SkASSERT(texture->asRenderTarget()->numSamples() == sampleCount);
                SkASSERT(textureProxy->origin() == kTopLeft_GrSurfaceOrigin);

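                // Map device-space coordinates to normalized atlas texture coordinates: scale by
                // the reciprocal of the texture dimensions and translate by the path's
                // (normalized) offset within the atlas.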
                fAtlasScale = {1.f / texture->width(), 1.f / texture->height()};
                fAtlasTranslate.set(fDevToAtlasOffset.fX * fAtlasScale.x(),
                                    fDevToAtlasOffset.fY * fAtlasScale.y());
                SkDEBUGCODE(fHasAtlasTransform = true);

                return texture;
            },
            atlasCoverageType, caps);

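    // Remember the path, its rounded-out device-space bounds, and the rect it will be accessed
    // from, for atlas rendering and resource accounting later in the flush.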
    fDeviceSpacePath = deviceSpacePath;
    fDeviceSpacePath.getBounds().roundOut(&fPathDevIBounds);
    fAccessRect = accessRect;
}

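// Adds this clip path to the per-flush resource specs: bumps the clip path count, records the
// path's stats, and reserves atlas space for the portion of the path inside the access rect.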
void GrCCClipPath::accountForOwnPath(GrCCPerFlushResourceSpecs* specs) const {
    SkASSERT(this->isInitialized());

    ++specs->fNumClipPaths;
    specs->fRenderedPathStats[GrCCPerFlushResourceSpecs::kFillIdx].statPath(fDeviceSpacePath);

    SkIRect ibounds;
    if (ibounds.intersect(fAccessRect, fPathDevIBounds)) {
        specs->fRenderedAtlasSpecs.accountForSpace(ibounds.width(), ibounds.height());
    }
}

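// Renders the clip path into an atlas and records which atlas it landed in, along with the
// offset from device space to its location in that atlas.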
void GrCCClipPath::renderPathInAtlas(GrCCPerFlushResources* resources,
                                     GrOnFlushResourceProvider* onFlushRP) {
    SkASSERT(this->isInitialized());
    SkASSERT(!fHasAtlas);
    fAtlas = resources->renderDeviceSpacePathInAtlas(
            fAccessRect, fDeviceSpacePath, fPathDevIBounds, GrFillRuleForSkPath(fDeviceSpacePath),
            &fDevToAtlasOffset);
    SkDEBUGCODE(fHasAtlas = true);
}