/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include <map>
#include "src/gpu/GrOnFlushResourceProvider.h"
#include "src/gpu/GrOpsTask.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/ccpr/GrCCPerFlushResources.h"
#include "src/gpu/ccpr/GrCCPerOpsTaskPaths.h"

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCCoverageProcessor, GrCCPathProcessor.)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer : public GrOnFlushCallbackObject {
public:
    static bool IsSupported(const GrCaps&);

    static std::unique_ptr<GrCoverageCountingPathRenderer> CreateIfSupported(const GrCaps&);

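    // Illustrative sketch (not part of the original header): a context might create and register
    // the renderer roughly like this, assuming "context" is a GrContext* whose private API
    // exposes addOnFlushCallbackObject, and the caller retains ownership of "ccpr":
    //
    //     if (auto ccpr = GrCoverageCountingPathRenderer::CreateIfSupported(caps)) {
    //         context->priv().addOnFlushCallbackObject(ccpr.get());
    //     }
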
    using PendingPathsMap = std::map<uint32_t, sk_sp<GrCCPerOpsTaskPaths>>;

    // In DDL mode, Ganesh needs to be able to move the pending GrCCPerOpsTaskPaths to the DDL
    // object (detachPendingPaths) and then return them upon replay (mergePendingPaths).
    PendingPathsMap detachPendingPaths() { return std::move(fPendingPaths); }

    void mergePendingPaths(const PendingPathsMap& paths) {
#ifdef SK_DEBUG
        // Ensure there are no duplicate opsTask IDs between the incoming path map and ours.
        // This should always be true since opsTask IDs are globally unique and these are coming
        // from different DDL recordings.
        for (const auto& it : paths) {
            SkASSERT(!fPendingPaths.count(it.first));
        }
#endif

        fPendingPaths.insert(paths.begin(), paths.end());
    }

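    // Illustrative sketch (not part of the original header) of the DDL flow described above.
    // "recorderCCPR" and "replayCCPR" are hypothetical renderer instances on the recording and
    // replaying contexts, respectively:
    //
    //     PendingPathsMap recorded = recorderCCPR->detachPendingPaths();  // at recording end
    //     replayCCPR->mergePendingPaths(recorded);                        // at DDL replay
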
    // The atlas can take up a lot of memory. We should only use clip processors for small paths;
    // large clip paths should use a different method, such as MSAA stencil.
    constexpr static int64_t kMaxClipPathArea = 256 * 256;

    GrFPResult makeClipProcessor(std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
                                 const SkPath& deviceSpacePath, const SkIRect& accessRect,
                                 const GrCaps& caps);

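    // Illustrative sketch (not part of the original header): a caller might enforce
    // kMaxClipPathArea before requesting a CCPR clip FP. "ccpr", "inputFP", "opsTaskID", and
    // "accessRect" are hypothetical names supplied by the caller:
    //
    //     SkIRect bounds;
    //     deviceSpacePath.getBounds().roundOut(&bounds);
    //     if ((int64_t)bounds.width() * bounds.height() <= kMaxClipPathArea) {
    //         auto [success, fp] = ccpr->makeClipProcessor(std::move(inputFP), opsTaskID,
    //                                                      deviceSpacePath, accessRect, caps);
    //     }  // Otherwise fall back to a different method, e.g. MSAA stencil.
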
    // GrOnFlushCallbackObject overrides.
    void preFlush(GrOnFlushResourceProvider*, SkSpan<const uint32_t> taskIDs) override;
    void postFlush(GrDeferredUploadToken, SkSpan<const uint32_t> taskIDs) override;

    // If a path spans more pixels than this, we need to crop it or else analytic AA can run out of
    // fp32 precision.
    static constexpr float kPathCropThreshold = 1 << 16;

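    // Illustrative sketch (not part of the original header) of the kind of check this threshold
    // implies; "path" is a hypothetical device-space SkPath, and the cropping step itself is
    // only summarized here:
    //
    //     SkRect b = path.getBounds();
    //     if (b.width() > kPathCropThreshold || b.height() > kPathCropThreshold) {
    //         // Crop the path before generating coverage geometry, to stay within
    //         // fp32 precision for analytic AA.
    //     }
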
    // Maximum inflation of path bounds due to stroking (from width, miter, caps). Strokes wider
    // than this will be converted to fill paths and drawn by the CCPR filler instead.
    static constexpr float kMaxBoundsInflationFromStroke = 4096;

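    // Illustrative sketch (not part of the original header): SkStrokeRec::getInflationRadius()
    // reports how far a stroke can extend past the path's geometric bounds, which is the kind of
    // value this limit would be compared against. "stroke" is a hypothetical SkStrokeRec:
    //
    //     if (stroke.getInflationRadius() > kMaxBoundsInflationFromStroke) {
    //         // Too wide to stroke directly: convert to a fill path and use the CCPR filler.
    //     }
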
    static constexpr int kDoCopiesThreshold = 100;

private:
    GrCCPerOpsTaskPaths* lookupPendingPaths(uint32_t opsTaskID);

    // fPendingPaths holds the GrCCPerOpsTaskPaths objects that have already been created, but not
    // flushed, and those that are still being created. All GrCCPerOpsTaskPaths objects will first
    // reside in fPendingPaths, then be moved to fFlushingPaths during preFlush().
    PendingPathsMap fPendingPaths;

    SkDEBUGCODE(bool fFlushing = false);
};

#endif