/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrMtlResourceProvider_DEFINED
#define GrMtlResourceProvider_DEFINED

#include "include/private/SkSpinlock.h"
#include "include/private/SkTArray.h"
#include "src/core/SkLRUCache.h"
#include "src/gpu/mtl/GrMtlDepthStencil.h"
#include "src/gpu/mtl/GrMtlPipelineStateBuilder.h"
#include "src/gpu/mtl/GrMtlSampler.h"

#import <Metal/Metal.h>

class GrMtlGpu;
class GrMtlCommandBuffer;

class GrMtlResourceProvider {
public:
    GrMtlResourceProvider(GrMtlGpu* gpu);

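    // Finds or creates a compatible GrMtlPipelineState keyed on the render target, pipeline,
    // primitive processor, and primitive type (backed by the PipelineStateCache below).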
    GrMtlPipelineState* findOrCreateCompatiblePipelineState(
            GrRenderTarget*, GrSurfaceOrigin,
            const GrPipeline&,
            const GrPrimitiveProcessor&,
            const GrTextureProxy* const primProcProxies[],
            GrPrimitiveType);

    // Finds or creates a compatible MTLDepthStencilState based on the GrStencilSettings.
    GrMtlDepthStencil* findOrCreateCompatibleDepthStencilState(const GrStencilSettings&,
                                                               GrSurfaceOrigin);

    // Finds or creates a compatible MTLSamplerState based on the GrSamplerState.
    GrMtlSampler* findOrCreateCompatibleSampler(const GrSamplerState&, uint32_t maxMipLevel);

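    // Returns a suballocation from a shared dynamic MTLBuffer, writing the suballocation's
    // byte offset within that buffer to *offset. addBufferCompletionHandler registers a
    // completed handler on the command buffer so the suballocated range can be reclaimed
    // once the GPU has finished with it (see BufferSuballocator below).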
    id<MTLBuffer> getDynamicBuffer(size_t size, size_t* offset);
    void addBufferCompletionHandler(GrMtlCommandBuffer* cmdBuffer);

    // Destroy any cached resources. To be called before releasing the MtlDevice.
    void destroyResources();

private:
#ifdef SK_DEBUG
#define GR_PIPELINE_STATE_CACHE_STATS
#endif

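    // LRU cache of GrMtlPipelineStates, keyed by the program description built for each draw.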
    class PipelineStateCache : public ::SkNoncopyable {
    public:
        PipelineStateCache(GrMtlGpu* gpu);
        ~PipelineStateCache();

        void release();
        GrMtlPipelineState* refPipelineState(GrRenderTarget*, GrSurfaceOrigin,
                                             const GrPrimitiveProcessor&,
                                             const GrTextureProxy* const primProcProxies[],
                                             const GrPipeline&,
                                             GrPrimitiveType);

    private:
        enum {
            // We may actually have kMaxEntries+1 PipelineStates in context because we create a new
            // PipelineState before evicting from the cache.
            kMaxEntries = 128,
        };

        struct Entry;

        struct DescHash {
            uint32_t operator()(const GrProgramDesc& desc) const {
                return SkOpts::hash_fn(desc.asKey(), desc.keyLength(), 0);
            }
        };

        SkLRUCache<const GrMtlPipelineStateBuilder::Desc, std::unique_ptr<Entry>, DescHash> fMap;

        GrMtlGpu* fGpu;

#ifdef GR_PIPELINE_STATE_CACHE_STATS
        int fTotalRequests;
        int fCacheMisses;
#endif
    };

    // Ring-buffer suballocator for transient dynamic-buffer data.
    class BufferSuballocator : public SkRefCnt {
    public:
        BufferSuballocator(id<MTLDevice> device, size_t size);
        ~BufferSuballocator() {
            fBuffer = nil;
            fTotalSize = 0;
        }

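        // Hands out a range of fBuffer: writes the start of the allocation within the shared
        // buffer to *offset (presumably returning nil when the request cannot currently be
        // satisfied). addCompletionHandler lets the command buffer signal when its
        // allocations may be recycled.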
        id<MTLBuffer> getAllocation(size_t size, size_t* offset);
        void addCompletionHandler(GrMtlCommandBuffer* cmdBuffer);
        size_t size() { return fTotalSize; }

    private:
        id<MTLBuffer> fBuffer;
        size_t        fTotalSize;
        size_t        fHead SK_GUARDED_BY(fMutex);  // where we start allocating
        size_t        fTail SK_GUARDED_BY(fMutex);  // where we start deallocating
        SkSpinlock    fMutex;
    };
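    // The suballocator starts at kBufferSuballocatorStartSize; presumably a larger one
    // (capped at kBufferSuballocatorMaxSize) replaces it when an allocation does not fit.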
    static constexpr size_t kBufferSuballocatorStartSize = 1024*1024;
    static constexpr size_t kBufferSuballocatorMaxSize = 8*1024*1024;

    GrMtlGpu* fGpu;

    // Cache of GrMtlPipelineStates
    std::unique_ptr<PipelineStateCache> fPipelineStateCache;

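    // Reusable sampler and depth-stencil state objects, hashed by their respective keys.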
    SkTDynamicHash<GrMtlSampler, GrMtlSampler::Key> fSamplers;
    SkTDynamicHash<GrMtlDepthStencil, GrMtlDepthStencil::Key> fDepthStencilStates;

    // This is ref-counted because we might delete the GrContext before the command buffer
    // finishes. The completion handler will retain a reference to this so it won't get
    // deleted along with the GrContext.
    sk_sp<BufferSuballocator> fBufferSuballocator;
};

#endif