blob: 9b9ce43dac0590701c523f5b625a9e093948ee69 [file] [log] [blame]
/*
 * Copyright 2020 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -05008#include "src/gpu/d3d/GrD3DGpu.h"
9
Greg Daniel31a7b072020-02-26 15:31:49 -050010#include "src/gpu/d3d/GrD3DCaps.h"
11#include "src/gpu/d3d/GrD3DOpsRenderPass.h"
12
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050013sk_sp<GrGpu> GrD3DGpu::Make(const GrD3DBackendContext& backendContext,
14 const GrContextOptions& contextOptions, GrContext* context) {
15 return sk_sp<GrGpu>(new GrD3DGpu(context, contextOptions, backendContext));
16}
17
// This constant determines how many OutstandingCommandLists are allocated together as a block in
// the deque. As such it needs to balance allocating too much memory vs. incurring
// allocation/deallocation thrashing. It should roughly correspond to the max number of outstanding
// command lists we expect to see.
static constexpr int kDefaultOutstandingAllocCnt = 8;
23
// Constructs a GrD3DGpu around the client-supplied device and queue from the
// backend context, sets up caps, and acquires the first direct command list.
GrD3DGpu::GrD3DGpu(GrContext* context, const GrContextOptions& contextOptions,
                   const GrD3DBackendContext& backendContext)
        : INHERITED(context)
        , fDevice(backendContext.fDevice)
        , fQueue(backendContext.fQueue)
        , fResourceProvider(this)
        , fOutstandingCommandLists(sizeof(OutstandingCommandList), kDefaultOutstandingAllocCnt) {
    // Capabilities are derived from the adapter/device the client handed us.
    fCaps.reset(new GrD3DCaps(contextOptions,
                              backendContext.fAdapter.Get(),
                              backendContext.fDevice.Get()));

    fCurrentDirectCommandList = fResourceProvider.findOrCreateDirectCommandList();
    SkASSERT(fCurrentDirectCommandList);

    // The fence starts at the current (zero) fence value; it is bumped and
    // signaled once per submitted command list (see submitDirectCommandList).
    // Note: hr and the asserts compile away in release builds; the CreateFence
    // call itself does not.
    SkASSERT(fCurrentFenceValue == 0);
    SkDEBUGCODE(HRESULT hr = ) fDevice->CreateFence(fCurrentFenceValue, D3D12_FENCE_FLAG_NONE,
                                                    IID_PPV_ARGS(&fFence));
    SkASSERT(SUCCEEDED(hr));
}
44
// Destructor: delegates all teardown (queue drain, command-list cleanup) to
// destroyResources().
GrD3DGpu::~GrD3DGpu() {
    this->destroyResources();
}
48
49void GrD3DGpu::destroyResources() {
50 if (fCurrentDirectCommandList) {
51 fCurrentDirectCommandList->close();
52 fCurrentDirectCommandList.reset();
53 }
54
55 // We need to make sure everything has finished on the queue.
56 if (fFence->GetCompletedValue() < fCurrentFenceValue) {
57 HANDLE fenceEvent;
58 fenceEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);
59 SkASSERT(fenceEvent);
60 SkDEBUGCODE(HRESULT hr = ) fFence->SetEventOnCompletion(fCurrentFenceValue, fenceEvent);
61 SkASSERT(SUCCEEDED(hr));
62 WaitForSingleObject(fenceEvent, INFINITE);
63 CloseHandle(fenceEvent);
64 }
65
66 SkDEBUGCODE(uint64_t fenceValue = fFence->GetCompletedValue();)
67
68 // We used a placement new for each object in fOutstandingCommandLists, so we're responsible
69 // for calling the destructor on each of them as well.
70 while (!fOutstandingCommandLists.empty()) {
71 OutstandingCommandList* list = (OutstandingCommandList*)fOutstandingCommandLists.back();
72 SkASSERT(list->fFenceValue <= fenceValue);
73 // No reason to recycle the command lists since we are destroying all resources anyways.
74 list->~OutstandingCommandList();
75 fOutstandingCommandLists.pop_back();
76 }
77}
Greg Daniel31a7b072020-02-26 15:31:49 -050078
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050079GrOpsRenderPass* GrD3DGpu::getOpsRenderPass(
80 GrRenderTarget* rt, GrSurfaceOrigin origin, const SkIRect& bounds,
81 const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
Greg Daniel31a7b072020-02-26 15:31:49 -050082 const GrOpsRenderPass::StencilLoadAndStoreInfo& stencilInfo,
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050083 const SkTArray<GrSurfaceProxy*, true>& sampledProxies) {
Greg Daniel31a7b072020-02-26 15:31:49 -050084 if (!fCachedOpsRenderPass) {
85 fCachedOpsRenderPass.reset(new GrD3DOpsRenderPass(this));
86 }
87
88 if (!fCachedOpsRenderPass->set(rt, origin, bounds, colorInfo, stencilInfo, sampledProxies)) {
89 return nullptr;
90 }
91 return fCachedOpsRenderPass.get();
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -050092}
93
// Submits the current direct command list to the queue, records it as
// outstanding together with the fence value that marks its completion, and
// acquires a fresh command list for further recording.
void GrD3DGpu::submitDirectCommandList() {
    SkASSERT(fCurrentDirectCommandList);

    fCurrentDirectCommandList->submit(fQueue.Get());

    // Placement-new the tracking entry into the deque; destroyResources()/
    // checkForFinishedCommandLists() are responsible for the manual destructor
    // calls that pair with this.
    new (fOutstandingCommandLists.push_back()) OutstandingCommandList(
            std::move(fCurrentDirectCommandList), ++fCurrentFenceValue);

    // hr and the assert compile away in release builds; the Signal call remains.
    SkDEBUGCODE(HRESULT hr = ) fQueue->Signal(fFence.Get(), fCurrentFenceValue);
    SkASSERT(SUCCEEDED(hr));

    fCurrentDirectCommandList = fResourceProvider.findOrCreateDirectCommandList();

    // This should be done after we have a new command list in case the freeing of any resources
    // held by a finished command list causes us send a new command to the gpu (like changing the
    // resource state.
    this->checkForFinishedCommandLists();

    SkASSERT(fCurrentDirectCommandList);
}
114
// Recycles every outstanding command list whose fence value the GPU has
// already reached, destroying the placement-new'd tracking entries as it goes.
void GrD3DGpu::checkForFinishedCommandLists() {
    uint64_t currentFenceValue = fFence->GetCompletedValue();

    // Iterate over all the outstanding command lists to see if any have finished. The commands
    // lists are in order from oldest to newest, so we start at the front to check if their fence
    // value is less than the last signaled value. If so we pop it off and move onto the next.
    // Repeat till we find a command list that has not finished yet (and all others afterwards are
    // also guaranteed to not have finished).
    SkDeque::F2BIter iter(fOutstandingCommandLists);
    const OutstandingCommandList* curList = (const OutstandingCommandList*)iter.next();
    while (curList && curList->fFenceValue <= currentFenceValue) {
        // Advance the iterator past the front entry *before* popping it, so the
        // iterator never points at the element being removed.
        curList = (const OutstandingCommandList*)iter.next();
        OutstandingCommandList* front = (OutstandingCommandList*)fOutstandingCommandLists.front();
        fResourceProvider.recycleDirectCommandList(std::move(front->fCommandList));
        // Since we used placement new we are responsible for calling the destructor manually.
        front->~OutstandingCommandList();
        fOutstandingCommandLists.pop_front();
    }
}
134
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -0500135void GrD3DGpu::submit(GrOpsRenderPass* renderPass) {
136 // TODO: actually submit something here
137 delete renderPass;
138}
139
void GrD3DGpu::querySampleLocations(GrRenderTarget* rt, SkTArray<SkPoint>* sampleLocations) {
    // TODO: not yet implemented for the D3D backend; sampleLocations is left untouched.
}
143
sk_sp<GrTexture> GrD3DGpu::onCreateTexture(SkISize dimensions,
                                           const GrBackendFormat& format,
                                           GrRenderable renderable,
                                           int renderTargetSampleCnt,
                                           SkBudgeted budgeted,
                                           GrProtected isProtected,
                                           int mipLevelCount,
                                           uint32_t levelClearMask) {
    // TODO: texture creation is not yet implemented; callers currently get nullptr.
    return nullptr;
}
155
sk_sp<GrTexture> GrD3DGpu::onCreateCompressedTexture(SkISize dimensions,
                                                     const GrBackendFormat& format,
                                                     SkBudgeted budgeted,
                                                     GrMipMapped mipMapped,
                                                     GrProtected isProtected,
                                                     const void* data, size_t dataSize) {
    // TODO: compressed texture creation is not yet implemented; returns nullptr.
    return nullptr;
}
165
sk_sp<GrTexture> GrD3DGpu::onWrapBackendTexture(const GrBackendTexture& tex, GrColorType colorType,
                                                GrWrapOwnership ownership,
                                                GrWrapCacheable wrapType, GrIOType ioType) {
    // TODO: wrapping client backend textures is not yet implemented; returns nullptr.
    return nullptr;
}
172
sk_sp<GrTexture> GrD3DGpu::onWrapCompressedBackendTexture(const GrBackendTexture& tex,
                                                          GrWrapOwnership ownership,
                                                          GrWrapCacheable wrapType) {
    // TODO: not yet implemented; wrapping compressed backend textures always fails for now.
    return nullptr;
}
178
sk_sp<GrTexture> GrD3DGpu::onWrapRenderableBackendTexture(const GrBackendTexture& tex,
                                                          int sampleCnt,
                                                          GrColorType colorType,
                                                          GrWrapOwnership ownership,
                                                          GrWrapCacheable cacheable) {
    // TODO: wrapping renderable backend textures is not yet implemented; returns nullptr.
    return nullptr;
}
187
sk_sp<GrRenderTarget> GrD3DGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& rt,
                                                          GrColorType colorType) {
    // TODO: wrapping backend render targets is not yet implemented; returns nullptr.
    return nullptr;
}
193
sk_sp<GrRenderTarget> GrD3DGpu::onWrapBackendTextureAsRenderTarget(const GrBackendTexture& tex,
                                                                   int sampleCnt,
                                                                   GrColorType colorType) {
    // TODO: not yet implemented; returns nullptr.
    return nullptr;
}
200
sk_sp<GrGpuBuffer> GrD3DGpu::onCreateBuffer(size_t sizeInBytes, GrGpuBufferType type,
                                            GrAccessPattern accessPattern, const void*) {
    // TODO: GPU buffer creation is not yet implemented; returns nullptr.
    return nullptr;
}
206
GrStencilAttachment* GrD3DGpu::createStencilAttachmentForRenderTarget(
        const GrRenderTarget* rt, int width, int height, int numStencilSamples) {
    // TODO: stencil attachment creation is not yet implemented; returns nullptr.
    return nullptr;
}
212
GrBackendTexture GrD3DGpu::onCreateBackendTexture(SkISize dimensions,
                                                  const GrBackendFormat& format,
                                                  GrRenderable,
                                                  GrMipMapped mipMapped,
                                                  GrProtected,
                                                  const BackendTextureData*) {
    // TODO: not yet implemented; returns an invalid (default) GrBackendTexture.
    return GrBackendTexture();
}
222
GrBackendTexture GrD3DGpu::onCreateCompressedBackendTexture(SkISize dimensions,
                                                            const GrBackendFormat& format,
                                                            GrMipMapped mipMapped,
                                                            GrProtected,
                                                            const BackendTextureData*) {
    // TODO: not yet implemented; returns an invalid (default) GrBackendTexture.
    return GrBackendTexture();
}
231
void GrD3DGpu::deleteBackendTexture(const GrBackendTexture& tex) {
    // TODO: backend texture deletion is not yet implemented; currently a no-op.
}
235
bool GrD3DGpu::compile(const GrProgramDesc&, const GrProgramInfo&) {
    // TODO: pipeline/program pre-compilation is not yet supported; always reports failure.
    return false;
}
239
Jim Van Verthd2d4c5e2020-02-19 14:57:58 -0500240#if GR_TEST_UTILS
bool GrD3DGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
    // TODO: not yet implemented; always reports false.
    return false;
}
245
GrBackendRenderTarget GrD3DGpu::createTestingOnlyBackendRenderTarget(int w, int h,
                                                                     GrColorType colorType) {
    // TODO: not yet implemented; returns an invalid (default) GrBackendRenderTarget.
    return GrBackendRenderTarget();
}
251
// TODO: not yet implemented; intentionally a no-op.
void GrD3DGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget&) {}
253#endif