/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrMockOpTarget_DEFINED
#define GrMockOpTarget_DEFINED

#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrGpu.h"
#include "src/gpu/ops/GrMeshDrawOp.h"

| 16 | // This is a mock GrMeshDrawOp::Target implementation that just gives back pointers into |
| 17 | // pre-allocated CPU buffers, rather than allocating and mapping GPU buffers. |
| 18 | class GrMockOpTarget : public GrMeshDrawOp::Target { |
| 19 | public: |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 20 | GrMockOpTarget(sk_sp<GrDirectContext> mockContext) : fMockContext(std::move(mockContext)) { |
| 21 | fStaticVertexBuffer = fMockContext->priv().getGpu()->createBuffer( |
| 22 | sizeof(fStaticVertexData), GrGpuBufferType::kVertex, kDynamic_GrAccessPattern); |
| 23 | fStaticIndirectBuffer = fMockContext->priv().getGpu()->createBuffer( |
| 24 | sizeof(fStaticIndirectData), GrGpuBufferType::kDrawIndirect, |
| 25 | kDynamic_GrAccessPattern); |
| 26 | } |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 27 | const GrDirectContext* mockContext() const { return fMockContext.get(); } |
| 28 | const GrCaps& caps() const override { return *fMockContext->priv().caps(); } |
| 29 | GrThreadSafeCache* threadSafeCache() const override { |
| 30 | return fMockContext->priv().threadSafeCache(); |
| 31 | } |
| 32 | GrResourceProvider* resourceProvider() const override { |
| 33 | return fMockContext->priv().resourceProvider(); |
| 34 | } |
| 35 | GrSmallPathAtlasMgr* smallPathAtlasManager() const override { return nullptr; } |
| 36 | void resetAllocator() { fAllocator.reset(); } |
| 37 | SkArenaAlloc* allocator() override { return &fAllocator; } |
| 38 | void putBackVertices(int vertices, size_t vertexStride) override { /* no-op */ } |
| 39 | GrAppliedClip detachAppliedClip() override { return GrAppliedClip::Disabled(); } |
John Stiles | 52cb1d0 | 2021-06-02 11:58:05 -0400 | [diff] [blame^] | 40 | const GrDstProxyView& dstProxyView() const override { return fDstProxyView; } |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 41 | GrXferBarrierFlags renderPassBarriers() const override { return GrXferBarrierFlags::kNone; } |
Greg Daniel | 42dbca5 | 2020-11-20 10:22:43 -0500 | [diff] [blame] | 42 | GrLoadOp colorLoadOp() const override { return GrLoadOp::kLoad; } |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 43 | |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 44 | void* makeVertexSpace(size_t vertexSize, int vertexCount, sk_sp<const GrBuffer>* buffer, |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 45 | int* startVertex) override { |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 46 | if (vertexSize * vertexCount > sizeof(fStaticVertexData)) { |
| 47 | SK_ABORT("FATAL: wanted %zu bytes of static vertex data; only have %zu.\n", |
| 48 | vertexSize * vertexCount, sizeof(fStaticVertexData)); |
| 49 | } |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 50 | *buffer = fStaticVertexBuffer; |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 51 | *startVertex = 0; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 52 | return fStaticVertexData; |
| 53 | } |
| 54 | |
| 55 | void* makeVertexSpaceAtLeast(size_t vertexSize, int minVertexCount, int fallbackVertexCount, |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 56 | sk_sp<const GrBuffer>* buffer, int* startVertex, |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 57 | int* actualVertexCount) override { |
| 58 | if (vertexSize * minVertexCount > sizeof(fStaticVertexData)) { |
| 59 | SK_ABORT("FATAL: wanted %zu bytes of static vertex data; only have %zu.\n", |
| 60 | vertexSize * minVertexCount, sizeof(fStaticVertexData)); |
| 61 | } |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 62 | *buffer = fStaticVertexBuffer; |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 63 | *startVertex = 0; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 64 | *actualVertexCount = sizeof(fStaticVertexData) / vertexSize; |
| 65 | return fStaticVertexData; |
| 66 | } |
| 67 | |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame] | 68 | GrDrawIndirectWriter makeDrawIndirectSpace(int drawCount, sk_sp<const GrBuffer>* buffer, |
| 69 | size_t* offsetInBytes) override { |
| 70 | if (sizeof(GrDrawIndirectCommand) * drawCount > sizeof(fStaticIndirectData)) { |
| 71 | SK_ABORT("FATAL: wanted %zu bytes of static indirect data; only have %zu.\n", |
| 72 | sizeof(GrDrawIndirectCommand) * drawCount, sizeof(fStaticIndirectData)); |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 73 | } |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 74 | *buffer = fStaticIndirectBuffer; |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 75 | *offsetInBytes = 0; |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame] | 76 | return fStaticIndirectData; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 77 | } |
| 78 | |
Chris Dalton | 7512507 | 2020-11-24 09:30:51 -0700 | [diff] [blame] | 79 | void putBackIndirectDraws(int count) override { /* no-op */ } |
| 80 | |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame] | 81 | GrDrawIndexedIndirectWriter makeDrawIndexedIndirectSpace(int drawCount, |
| 82 | sk_sp<const GrBuffer>* buffer, |
| 83 | size_t* offsetInBytes) override { |
| 84 | if (sizeof(GrDrawIndexedIndirectCommand) * drawCount > sizeof(fStaticIndirectData)) { |
| 85 | SK_ABORT("FATAL: wanted %zu bytes of static indirect data; only have %zu.\n", |
| 86 | sizeof(GrDrawIndexedIndirectCommand) * drawCount, sizeof(fStaticIndirectData)); |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 87 | } |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 88 | *buffer = fStaticIndirectBuffer; |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 89 | *offsetInBytes = 0; |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame] | 90 | return fStaticIndirectData; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 91 | } |
| 92 | |
Chris Dalton | 7512507 | 2020-11-24 09:30:51 -0700 | [diff] [blame] | 93 | void putBackIndexedIndirectDraws(int count) override { /* no-op */ } |
| 94 | |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame] | 95 | // Call these methods to see what got written after the previous call to make*Space. |
| 96 | const void* peekStaticVertexData() const { return fStaticVertexData; } |
| 97 | const void* peekStaticIndirectData() const { return fStaticIndirectData; } |
| 98 | |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 99 | #define UNIMPL(...) __VA_ARGS__ override { SK_ABORT("unimplemented."); } |
| 100 | UNIMPL(void recordDraw(const GrGeometryProcessor*, const GrSimpleMesh[], int, |
| 101 | const GrSurfaceProxy* const[], GrPrimitiveType)) |
| 102 | UNIMPL(uint16_t* makeIndexSpace(int, sk_sp<const GrBuffer>*, int*)) |
| 103 | UNIMPL(uint16_t* makeIndexSpaceAtLeast(int, int, sk_sp<const GrBuffer>*, int*, int*)) |
| 104 | UNIMPL(void putBackIndices(int)) |
Robert Phillips | 5c80964 | 2020-11-20 12:28:45 -0500 | [diff] [blame] | 105 | UNIMPL(GrRenderTargetProxy* rtProxy() const) |
Adlai Holler | e2296f7 | 2020-11-19 13:41:26 -0500 | [diff] [blame] | 106 | UNIMPL(const GrSurfaceProxyView& writeView() const) |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 107 | UNIMPL(const GrAppliedClip* appliedClip() const) |
Chris Dalton | 2517ce3 | 2021-04-13 00:21:15 -0600 | [diff] [blame] | 108 | UNIMPL(bool usesMSAASurface() const) |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 109 | UNIMPL(GrStrikeCache* strikeCache() const) |
| 110 | UNIMPL(GrAtlasManager* atlasManager() const) |
| 111 | UNIMPL(SkTArray<GrSurfaceProxy*, true>* sampledProxyArray()) |
| 112 | UNIMPL(GrDeferredUploadTarget* deferredUploadTarget()) |
| 113 | #undef UNIMPL |
| 114 | |
| 115 | private: |
| 116 | sk_sp<GrDirectContext> fMockContext; |
Chris Dalton | c2a1746 | 2020-12-09 16:46:22 -0700 | [diff] [blame] | 117 | char fStaticVertexData[6 * 1024 * 1024]; |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 118 | sk_sp<GrGpuBuffer> fStaticVertexBuffer; |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame] | 119 | char fStaticIndirectData[sizeof(GrDrawIndexedIndirectCommand) * 32]; |
Chris Dalton | 8ed7a8d | 2021-03-31 10:40:29 -0600 | [diff] [blame] | 120 | sk_sp<GrGpuBuffer> fStaticIndirectBuffer; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 121 | SkSTArenaAllocWithReset<1024 * 1024> fAllocator; |
John Stiles | 52cb1d0 | 2021-06-02 11:58:05 -0400 | [diff] [blame^] | 122 | GrDstProxyView fDstProxyView; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 123 | }; |

#endif