/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrOpFlushState_DEFINED
#define GrOpFlushState_DEFINED

#include <utility>
#include "src/core/SkArenaAlloc.h"
#include "src/core/SkArenaAllocList.h"
#include "src/gpu/GrAppliedClip.h"
#include "src/gpu/GrBufferAllocPool.h"
#include "src/gpu/GrDeferredUpload.h"
#include "src/gpu/GrProgramInfo.h"
#include "src/gpu/GrRenderTargetProxy.h"
#include "src/gpu/GrSurfaceProxyView.h"
#include "src/gpu/ops/GrMeshDrawOp.h"

class GrGpu;
class GrOpsRenderPass;
class GrResourceProvider;

/** Tracks the state across all the GrOps (really just the GrDrawOps) in a GrOpsTask flush. */
class GrOpFlushState final : public GrDeferredUploadTarget, public GrMeshDrawOp::Target {
public:
    // vertexSpace and indexSpace may either be null or an allocation of size
    // GrBufferAllocPool::kDefaultBufferSize. If the latter, then CPU memory is only allocated for
    // vertices/indices when a buffer larger than kDefaultBufferSize is required.
    GrOpFlushState(GrGpu*, GrResourceProvider*, GrTokenTracker*,
                   sk_sp<GrBufferAllocPool::CpuBufferCache> = nullptr);

    ~GrOpFlushState() final { this->reset(); }

    /** This is called after each op has a chance to prepare its draws and before the draws are
        executed. */
    void preExecuteDraws();

    /** Called to upload data to a texture using the GrDeferredTextureUploadFn. If the uploaded
        surface needs to be prepared for being sampled in a draw after the upload, the caller
        should pass in true for shouldPrepareSurfaceForSampling. This feature is needed for Vulkan
        when doing inline uploads to reset the image layout back to sampled. */
    void doUpload(GrDeferredTextureUploadFn&, bool shouldPrepareSurfaceForSampling = false);
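
    // A minimal sketch of the upload-function shape (illustrative only; the captured state and
    // the exact writePixels parameters are assumptions, see GrDeferredUpload.h for the real
    // signatures):
    //
    //   GrDeferredTextureUploadFn upload =
    //           [/* proxy, rect, pixel data */](GrDeferredTextureUploadWritePixelsFn& writePixels) {
    //       // Call writePixels(...) with the captured proxy, update rect, color type, and pixels.
    //   };
    //   flushState->doUpload(upload, /*shouldPrepareSurfaceForSampling=*/false);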

    /** Called as ops are executed. Must be called in the same order as the ops were prepared. */
    void executeDrawsAndUploadsForMeshDrawOp(const GrOp* op, const SkRect& chainBounds,
                                             const GrPipeline*);

    GrOpsRenderPass* opsRenderPass() { return fOpsRenderPass; }
    void setOpsRenderPass(GrOpsRenderPass* renderPass) { fOpsRenderPass = renderPass; }

    GrGpu* gpu() { return fGpu; }

    void reset();

    /** Additional data required on a per-op basis when executing GrOps. */
    struct OpArgs {
        // TODO: why does OpArgs have the op we're going to pass it to as a member? Remove it.
        explicit OpArgs(GrOp* op, GrSurfaceProxyView* surfaceView, GrAppliedClip* appliedClip,
                        const GrXferProcessor::DstProxyView& dstProxyView)
                : fOp(op)
                , fSurfaceView(surfaceView)
                , fRenderTargetProxy(surfaceView->asRenderTargetProxy())
                , fAppliedClip(appliedClip)
                , fDstProxyView(dstProxyView) {
            SkASSERT(surfaceView->asRenderTargetProxy());
        }

        GrSurfaceOrigin origin() const { return fSurfaceView->origin(); }
        GrSwizzle writeSwizzle() const { return fSurfaceView->swizzle(); }

        GrOp* op() { return fOp; }
        const GrSurfaceProxyView* writeView() const { return fSurfaceView; }
        GrRenderTargetProxy* proxy() const { return fRenderTargetProxy; }
        GrAppliedClip* appliedClip() { return fAppliedClip; }
        const GrAppliedClip* appliedClip() const { return fAppliedClip; }
        const GrXferProcessor::DstProxyView& dstProxyView() const { return fDstProxyView; }

#ifdef SK_DEBUG
        void validate() const {
            SkASSERT(fOp);
            SkASSERT(fSurfaceView);
        }
#endif

    private:
        GrOp* fOp;
        GrSurfaceProxyView* fSurfaceView;
        GrRenderTargetProxy* fRenderTargetProxy;
        GrAppliedClip* fAppliedClip;
        GrXferProcessor::DstProxyView fDstProxyView;  // TODO: do we still need the dst proxy here?
    };

    void setOpArgs(OpArgs* opArgs) { fOpArgs = opArgs; }

    const OpArgs& drawOpArgs() const {
        SkASSERT(fOpArgs);
        SkDEBUGCODE(fOpArgs->validate());
        return *fOpArgs;
    }
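
    // A minimal sketch of how OpArgs is used during flush (illustrative only; the exact call
    // sequence lives in GrOpsTask, and the 'op', 'surfaceView', 'clip', and 'chainBounds'
    // variables below are assumptions):
    //
    //   OpArgs opArgs(op, &surfaceView, clip, dstProxyView);
    //   flushState->setOpArgs(&opArgs);
    //   op->execute(flushState, chainBounds);   // the op queries drawOpArgs() while executing
    //   flushState->setOpArgs(nullptr);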

    void setSampledProxyArray(SkTArray<GrSurfaceProxy*, true>* sampledProxies) {
        fSampledProxies = sampledProxies;
    }

    SkTArray<GrSurfaceProxy*, true>* sampledProxyArray() override {
        return fSampledProxies;
    }

    /** Overrides of GrDeferredUploadTarget. */

    const GrTokenTracker* tokenTracker() final { return fTokenTracker; }
    GrDeferredUploadToken addInlineUpload(GrDeferredTextureUploadFn&&) final;
    GrDeferredUploadToken addASAPUpload(GrDeferredTextureUploadFn&&) final;
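
    // Sketch of how an op might record an upload while preparing (illustrative only; the
    // 'upload' lambda and 'target' are assumptions). Roughly speaking, inline uploads are
    // interleaved with the op's draws while ASAP uploads run as early in the flush as possible.
    //
    //   GrDeferredUploadToken token = target->addInlineUpload(std::move(upload));
    //   // 'token' orders the upload relative to the op's subsequently recorded draws.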

    /** Overrides of GrMeshDrawOp::Target. */
    void recordDraw(const GrGeometryProcessor*,
                    const GrSimpleMesh[],
                    int meshCnt,
                    const GrSurfaceProxy* const primProcProxies[],
                    GrPrimitiveType) final;
    void* makeVertexSpace(size_t vertexSize, int vertexCount, sk_sp<const GrBuffer>*,
                          int* startVertex) final;
    uint16_t* makeIndexSpace(int indexCount, sk_sp<const GrBuffer>*, int* startIndex) final;
    void* makeVertexSpaceAtLeast(size_t vertexSize, int minVertexCount, int fallbackVertexCount,
                                 sk_sp<const GrBuffer>*, int* startVertex,
                                 int* actualVertexCount) final;
    uint16_t* makeIndexSpaceAtLeast(int minIndexCount, int fallbackIndexCount,
                                    sk_sp<const GrBuffer>*, int* startIndex,
                                    int* actualIndexCount) final;
    void putBackIndices(int indexCount) final;
    void putBackVertices(int vertices, size_t vertexStride) final;
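
    // Sketch of how a GrMeshDrawOp might use this target while preparing a quad (illustrative
    // only; 'vertexStride' and the vertex layout are assumptions about the op, not this API):
    //
    //   sk_sp<const GrBuffer> vertexBuffer;
    //   int firstVertex;
    //   void* verts = target->makeVertexSpace(vertexStride, 4, &vertexBuffer, &firstVertex);
    //   if (!verts) {
    //       return;  // allocation failed; skip recording the draw
    //   }
    //   // ... write 4 vertices into 'verts', then record a GrSimpleMesh that references
    //   // 'vertexBuffer' and 'firstVertex'.
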
    const GrSurfaceProxyView* writeView() const final { return this->drawOpArgs().writeView(); }
    GrRenderTargetProxy* proxy() const final { return this->drawOpArgs().proxy(); }
    const GrAppliedClip* appliedClip() const final { return this->drawOpArgs().appliedClip(); }
    const GrAppliedHardClip& appliedHardClip() const {
        return (fOpArgs->appliedClip()) ?
                fOpArgs->appliedClip()->hardClip() : GrAppliedHardClip::Disabled();
    }
    GrAppliedClip detachAppliedClip() final;
    const GrXferProcessor::DstProxyView& dstProxyView() const final {
        return this->drawOpArgs().dstProxyView();
    }
    GrDeferredUploadTarget* deferredUploadTarget() final { return this; }
    const GrCaps& caps() const final;
    GrResourceProvider* resourceProvider() const final { return fResourceProvider; }

    GrStrikeCache* glyphCache() const final;

    // At this point we know we're flushing so full access to the GrAtlasManager is required (and
    // permissible).
    GrAtlasManager* atlasManager() const final;

    /** GrMeshDrawOp::Target override. */
    SkArenaAlloc* allocator() override { return &fArena; }

    // This is a convenience method that binds the given pipeline, and then, if our applied clip
    // has a scissor, sets the scissor rect from the applied clip.
    void bindPipelineAndScissorClip(const GrProgramInfo& programInfo, const SkRect& drawBounds) {
        SkASSERT((programInfo.pipeline().isScissorTestEnabled()) ==
                 (this->appliedClip() && this->appliedClip()->scissorState().enabled()));
        this->bindPipeline(programInfo, drawBounds);
        if (programInfo.pipeline().isScissorTestEnabled()) {
            this->setScissorRect(this->appliedClip()->scissorState().rect());
        }
    }

    // This is a convenience method for when the primitive processor has exactly one texture. It
    // binds one texture for the primitive processor, and any others for FPs on the pipeline.
    void bindTextures(const GrPrimitiveProcessor& primProc,
                      const GrSurfaceProxy& singlePrimProcTexture, const GrPipeline& pipeline) {
        SkASSERT(primProc.numTextureSamplers() == 1);
        const GrSurfaceProxy* ptr = &singlePrimProcTexture;
        this->bindTextures(primProc, &ptr, pipeline);
    }

    // Makes the appropriate bindBuffers() and draw*() calls for the provided mesh.
    void drawMesh(const GrSimpleMesh& mesh);
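
    // Sketch of the typical execute-time sequence for a mesh op (illustrative only; 'programInfo',
    // 'proxies', and 'mesh' stand in for whatever the op recorded during its prepare step):
    //
    //   flushState->bindPipelineAndScissorClip(*programInfo, chainBounds);
    //   flushState->bindTextures(programInfo->primProc(), proxies, programInfo->pipeline());
    //   flushState->drawMesh(*mesh);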

    // Pass-through methods to GrOpsRenderPass.
    void bindPipeline(const GrProgramInfo& programInfo, const SkRect& drawBounds) {
        fOpsRenderPass->bindPipeline(programInfo, drawBounds);
    }
    void setScissorRect(const SkIRect& scissorRect) {
        fOpsRenderPass->setScissorRect(scissorRect);
    }
    void bindTextures(const GrPrimitiveProcessor& primProc,
                      const GrSurfaceProxy* const primProcTextures[], const GrPipeline& pipeline) {
        fOpsRenderPass->bindTextures(primProc, primProcTextures, pipeline);
    }
    void bindBuffers(const GrBuffer* indexBuffer, const GrBuffer* instanceBuffer,
                     const GrBuffer* vertexBuffer,
                     GrPrimitiveRestart primitiveRestart = GrPrimitiveRestart::kNo) {
        fOpsRenderPass->bindBuffers(indexBuffer, instanceBuffer, vertexBuffer, primitiveRestart);
    }
    void draw(int vertexCount, int baseVertex) {
        fOpsRenderPass->draw(vertexCount, baseVertex);
    }
    void drawIndexed(int indexCount, int baseIndex, uint16_t minIndexValue, uint16_t maxIndexValue,
                     int baseVertex) {
        fOpsRenderPass->drawIndexed(indexCount, baseIndex, minIndexValue, maxIndexValue,
                                    baseVertex);
    }
    void drawInstanced(int instanceCount, int baseInstance, int vertexCount, int baseVertex) {
        fOpsRenderPass->drawInstanced(instanceCount, baseInstance, vertexCount, baseVertex);
    }
    void drawIndexedInstanced(int indexCount, int baseIndex, int instanceCount, int baseInstance,
                              int baseVertex) {
        fOpsRenderPass->drawIndexedInstanced(indexCount, baseIndex, instanceCount, baseInstance,
                                             baseVertex);
    }
    void drawIndexPattern(int patternIndexCount, int patternRepeatCount,
                          int maxPatternRepetitionsInIndexBuffer, int patternVertexCount,
                          int baseVertex) {
        fOpsRenderPass->drawIndexPattern(patternIndexCount, patternRepeatCount,
                                         maxPatternRepetitionsInIndexBuffer, patternVertexCount,
                                         baseVertex);
    }
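
    // Sketch of using the pass-throughs directly, bypassing drawMesh() (illustrative only; the
    // buffer and count variables are assumptions): an op that manages its own buffers can bind
    // and draw against the render pass itself.
    //
    //   flushState->bindBuffers(indexBuffer.get(), nullptr, vertexBuffer.get());
    //   flushState->drawIndexed(indexCount, /*baseIndex=*/0, minIndexValue, maxIndexValue,
    //                           baseVertex);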

private:
    struct InlineUpload {
        InlineUpload(GrDeferredTextureUploadFn&& upload, GrDeferredUploadToken token)
                : fUpload(std::move(upload)), fUploadBeforeToken(token) {}
        GrDeferredTextureUploadFn fUpload;
        GrDeferredUploadToken fUploadBeforeToken;
    };

    // A set of contiguous draws that share a draw token, geometry processor, and pipeline. The
    // meshes for the draw are stored in the fMeshes array. The reason for coalescing meshes
    // that share a geometry processor into a Draw is that it allows the Gpu object to set up
    // the shared state once and then issue draws for each mesh.
    struct Draw {
        ~Draw();
        // The geometry processor is always forced to be in an arena allocation or appears on
        // the stack (for CCPR). In either case this object does not need to manage its
        // lifetime.
        const GrGeometryProcessor* fGeometryProcessor = nullptr;
        // Must have GrPrimitiveProcessor::numTextureSamplers() entries. Can be null if no samplers.
        const GrSurfaceProxy* const* fPrimProcProxies = nullptr;
        const GrSimpleMesh* fMeshes = nullptr;
        const GrOp* fOp = nullptr;
        int fMeshCnt = 0;
        GrPrimitiveType fPrimitiveType;
    };

    // Storage for ops' pipelines, draws, and inline uploads.
    SkArenaAlloc fArena{sizeof(GrPipeline) * 100};

    // Store vertex and index data on behalf of ops that are flushed.
    GrVertexBufferAllocPool fVertexPool;
    GrIndexBufferAllocPool fIndexPool;

    // Data stored on behalf of the ops being flushed.
    SkArenaAllocList<GrDeferredTextureUploadFn> fASAPUploads;
    SkArenaAllocList<InlineUpload> fInlineUploads;
    SkArenaAllocList<Draw> fDraws;

    // All draws we store have an implicit draw token. This is the draw token for the first draw
    // in fDraws.
    GrDeferredUploadToken fBaseDrawToken = GrDeferredUploadToken::AlreadyFlushedToken();

    // Info about the op that is currently preparing or executing using the flush state or null if
    // an op is not currently preparing or executing.
    OpArgs* fOpArgs = nullptr;

    // This field is only transiently set during flush. Each GrOpsTask will set it to point to an
    // array of proxies it uses before calling onPrepare and onExecute.
    SkTArray<GrSurfaceProxy*, true>* fSampledProxies;

    GrGpu* fGpu;
    GrResourceProvider* fResourceProvider;
    GrTokenTracker* fTokenTracker;
    GrOpsRenderPass* fOpsRenderPass = nullptr;

    // Variables that are used to track where we are in lists as ops are executed
    SkArenaAllocList<Draw>::Iter fCurrDraw;
    SkArenaAllocList<InlineUpload>::Iter fCurrUpload;
};

#endif