/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
| 7 | |
| 8 | #ifndef GrMockOpTarget_DEFINED |
| 9 | #define GrMockOpTarget_DEFINED |
| 10 | |
| 11 | #include "include/gpu/GrDirectContext.h" |
| 12 | #include "src/gpu/GrDirectContextPriv.h" |
| 13 | #include "src/gpu/ops/GrMeshDrawOp.h" |
| 14 | |
| 15 | // This is a mock GrMeshDrawOp::Target implementation that just gives back pointers into |
| 16 | // pre-allocated CPU buffers, rather than allocating and mapping GPU buffers. |
| 17 | class GrMockOpTarget : public GrMeshDrawOp::Target { |
| 18 | public: |
| 19 | GrMockOpTarget(sk_sp<GrDirectContext> mockContext) : fMockContext(std::move(mockContext)) {} |
| 20 | const GrDirectContext* mockContext() const { return fMockContext.get(); } |
| 21 | const GrCaps& caps() const override { return *fMockContext->priv().caps(); } |
| 22 | GrThreadSafeCache* threadSafeCache() const override { |
| 23 | return fMockContext->priv().threadSafeCache(); |
| 24 | } |
| 25 | GrResourceProvider* resourceProvider() const override { |
| 26 | return fMockContext->priv().resourceProvider(); |
| 27 | } |
| 28 | GrSmallPathAtlasMgr* smallPathAtlasManager() const override { return nullptr; } |
| 29 | void resetAllocator() { fAllocator.reset(); } |
| 30 | SkArenaAlloc* allocator() override { return &fAllocator; } |
| 31 | void putBackVertices(int vertices, size_t vertexStride) override { /* no-op */ } |
| 32 | GrAppliedClip detachAppliedClip() override { return GrAppliedClip::Disabled(); } |
| 33 | const GrXferProcessor::DstProxyView& dstProxyView() const override { return fDstProxyView; } |
| 34 | GrXferBarrierFlags renderPassBarriers() const override { return GrXferBarrierFlags::kNone; } |
Greg Daniel | 42dbca5 | 2020-11-20 10:22:43 -0500 | [diff] [blame] | 35 | GrLoadOp colorLoadOp() const override { return GrLoadOp::kLoad; } |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 36 | |
| 37 | void* makeVertexSpace(size_t vertexSize, int vertexCount, sk_sp<const GrBuffer>*, |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 38 | int* startVertex) override { |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 39 | if (vertexSize * vertexCount > sizeof(fStaticVertexData)) { |
| 40 | SK_ABORT("FATAL: wanted %zu bytes of static vertex data; only have %zu.\n", |
| 41 | vertexSize * vertexCount, sizeof(fStaticVertexData)); |
| 42 | } |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 43 | *startVertex = 0; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 44 | return fStaticVertexData; |
| 45 | } |
| 46 | |
| 47 | void* makeVertexSpaceAtLeast(size_t vertexSize, int minVertexCount, int fallbackVertexCount, |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 48 | sk_sp<const GrBuffer>*, int* startVertex, |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 49 | int* actualVertexCount) override { |
| 50 | if (vertexSize * minVertexCount > sizeof(fStaticVertexData)) { |
| 51 | SK_ABORT("FATAL: wanted %zu bytes of static vertex data; only have %zu.\n", |
| 52 | vertexSize * minVertexCount, sizeof(fStaticVertexData)); |
| 53 | } |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 54 | *startVertex = 0; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 55 | *actualVertexCount = sizeof(fStaticVertexData) / vertexSize; |
| 56 | return fStaticVertexData; |
| 57 | } |
| 58 | |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame^] | 59 | GrDrawIndirectWriter makeDrawIndirectSpace(int drawCount, sk_sp<const GrBuffer>* buffer, |
| 60 | size_t* offsetInBytes) override { |
| 61 | if (sizeof(GrDrawIndirectCommand) * drawCount > sizeof(fStaticIndirectData)) { |
| 62 | SK_ABORT("FATAL: wanted %zu bytes of static indirect data; only have %zu.\n", |
| 63 | sizeof(GrDrawIndirectCommand) * drawCount, sizeof(fStaticIndirectData)); |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 64 | } |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 65 | *offsetInBytes = 0; |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame^] | 66 | return fStaticIndirectData; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 67 | } |
| 68 | |
Chris Dalton | 7512507 | 2020-11-24 09:30:51 -0700 | [diff] [blame] | 69 | void putBackIndirectDraws(int count) override { /* no-op */ } |
| 70 | |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame^] | 71 | GrDrawIndexedIndirectWriter makeDrawIndexedIndirectSpace(int drawCount, |
| 72 | sk_sp<const GrBuffer>* buffer, |
| 73 | size_t* offsetInBytes) override { |
| 74 | if (sizeof(GrDrawIndexedIndirectCommand) * drawCount > sizeof(fStaticIndirectData)) { |
| 75 | SK_ABORT("FATAL: wanted %zu bytes of static indirect data; only have %zu.\n", |
| 76 | sizeof(GrDrawIndexedIndirectCommand) * drawCount, sizeof(fStaticIndirectData)); |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 77 | } |
Chris Dalton | 641ff3b | 2020-11-24 11:04:23 -0700 | [diff] [blame] | 78 | *offsetInBytes = 0; |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame^] | 79 | return fStaticIndirectData; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 80 | } |
| 81 | |
Chris Dalton | 7512507 | 2020-11-24 09:30:51 -0700 | [diff] [blame] | 82 | void putBackIndexedIndirectDraws(int count) override { /* no-op */ } |
| 83 | |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame^] | 84 | // Call these methods to see what got written after the previous call to make*Space. |
| 85 | const void* peekStaticVertexData() const { return fStaticVertexData; } |
| 86 | const void* peekStaticIndirectData() const { return fStaticIndirectData; } |
| 87 | |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 88 | #define UNIMPL(...) __VA_ARGS__ override { SK_ABORT("unimplemented."); } |
| 89 | UNIMPL(void recordDraw(const GrGeometryProcessor*, const GrSimpleMesh[], int, |
| 90 | const GrSurfaceProxy* const[], GrPrimitiveType)) |
| 91 | UNIMPL(uint16_t* makeIndexSpace(int, sk_sp<const GrBuffer>*, int*)) |
| 92 | UNIMPL(uint16_t* makeIndexSpaceAtLeast(int, int, sk_sp<const GrBuffer>*, int*, int*)) |
| 93 | UNIMPL(void putBackIndices(int)) |
Robert Phillips | 5c80964 | 2020-11-20 12:28:45 -0500 | [diff] [blame] | 94 | UNIMPL(GrRenderTargetProxy* rtProxy() const) |
Adlai Holler | e2296f7 | 2020-11-19 13:41:26 -0500 | [diff] [blame] | 95 | UNIMPL(const GrSurfaceProxyView& writeView() const) |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 96 | UNIMPL(const GrAppliedClip* appliedClip() const) |
| 97 | UNIMPL(GrStrikeCache* strikeCache() const) |
| 98 | UNIMPL(GrAtlasManager* atlasManager() const) |
| 99 | UNIMPL(SkTArray<GrSurfaceProxy*, true>* sampledProxyArray()) |
| 100 | UNIMPL(GrDeferredUploadTarget* deferredUploadTarget()) |
| 101 | #undef UNIMPL |
| 102 | |
| 103 | private: |
| 104 | sk_sp<GrDirectContext> fMockContext; |
Chris Dalton | c2a1746 | 2020-12-09 16:46:22 -0700 | [diff] [blame] | 105 | char fStaticVertexData[6 * 1024 * 1024]; |
Chris Dalton | a6a3d05 | 2021-02-07 20:56:36 -0700 | [diff] [blame^] | 106 | char fStaticIndirectData[sizeof(GrDrawIndexedIndirectCommand) * 32]; |
Chris Dalton | 90ad0fe | 2020-11-09 14:13:39 -0700 | [diff] [blame] | 107 | SkSTArenaAllocWithReset<1024 * 1024> fAllocator; |
| 108 | GrXferProcessor::DstProxyView fDstProxyView; |
| 109 | }; |
| 110 | |
| 111 | #endif |