/*
 * Copyright 2020 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/d3d/GrD3DGpu.h"

#include "src/gpu/d3d/GrD3DCaps.h"
#include "src/gpu/d3d/GrD3DOpsRenderPass.h"
#include "src/gpu/d3d/GrD3DTexture.h"
#include "src/gpu/d3d/GrD3DTextureRenderTarget.h"
#include "src/gpu/d3d/GrD3DUtil.h"

sk_sp<GrGpu> GrD3DGpu::Make(const GrD3DBackendContext& backendContext,
                            const GrContextOptions& contextOptions, GrContext* context) {
    return sk_sp<GrGpu>(new GrD3DGpu(context, contextOptions, backendContext));
}

// This constant determines how many OutstandingCommandLists are allocated together as a block in
// the deque. As such it needs to balance allocating too much memory vs. incurring
// allocation/deallocation thrashing. It should roughly correspond to the max number of outstanding
// command lists we expect to see.
static const int kDefaultOutstandingAllocCnt = 8;

GrD3DGpu::GrD3DGpu(GrContext* context, const GrContextOptions& contextOptions,
                   const GrD3DBackendContext& backendContext)
        : INHERITED(context)
        , fDevice(backendContext.fDevice)
        , fQueue(backendContext.fQueue)
        , fResourceProvider(this)
        , fOutstandingCommandLists(sizeof(OutstandingCommandList), kDefaultOutstandingAllocCnt) {
    fCaps.reset(new GrD3DCaps(contextOptions,
                              backendContext.fAdapter.Get(),
                              backendContext.fDevice.Get()));

    fCurrentDirectCommandList = fResourceProvider.findOrCreateDirectCommandList();
    SkASSERT(fCurrentDirectCommandList);

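    // Create the fence used to track GPU progress: each submitted command list is paired with a
    // monotonically increasing fence value, and the fence reports the last value the GPU has
    // completed.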
    SkASSERT(fCurrentFenceValue == 0);
    SkDEBUGCODE(HRESULT hr = ) fDevice->CreateFence(fCurrentFenceValue, D3D12_FENCE_FLAG_NONE,
                                                    IID_PPV_ARGS(&fFence));
    SkASSERT(SUCCEEDED(hr));
}

GrD3DGpu::~GrD3DGpu() {
    this->destroyResources();
}

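// Waits for the GPU to finish all outstanding work, then tears down the command lists and
// fence-tracking state owned by this GrD3DGpu.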
void GrD3DGpu::destroyResources() {
    if (fCurrentDirectCommandList) {
        fCurrentDirectCommandList->close();
        fCurrentDirectCommandList.reset();
    }

    // We need to make sure everything has finished on the queue.
    if (fFence->GetCompletedValue() < fCurrentFenceValue) {
        HANDLE fenceEvent;
        fenceEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);
        SkASSERT(fenceEvent);
        SkDEBUGCODE(HRESULT hr = ) fFence->SetEventOnCompletion(fCurrentFenceValue, fenceEvent);
        SkASSERT(SUCCEEDED(hr));
        WaitForSingleObject(fenceEvent, INFINITE);
        CloseHandle(fenceEvent);
    }

    SkDEBUGCODE(uint64_t fenceValue = fFence->GetCompletedValue();)

    // We used a placement new for each object in fOutstandingCommandLists, so we're responsible
    // for calling the destructor on each of them as well.
    while (!fOutstandingCommandLists.empty()) {
        OutstandingCommandList* list = (OutstandingCommandList*)fOutstandingCommandLists.back();
        SkASSERT(list->fFenceValue <= fenceValue);
        // No reason to recycle the command lists since we are destroying all resources anyway.
        list->~OutstandingCommandList();
        fOutstandingCommandLists.pop_back();
    }
}

GrOpsRenderPass* GrD3DGpu::getOpsRenderPass(
        GrRenderTarget* rt, GrSurfaceOrigin origin, const SkIRect& bounds,
        const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
        const GrOpsRenderPass::StencilLoadAndStoreInfo& stencilInfo,
        const SkTArray<GrSurfaceProxy*, true>& sampledProxies) {
    if (!fCachedOpsRenderPass) {
        fCachedOpsRenderPass.reset(new GrD3DOpsRenderPass(this));
    }

    if (!fCachedOpsRenderPass->set(rt, origin, bounds, colorInfo, stencilInfo, sampledProxies)) {
        return nullptr;
    }
    return fCachedOpsRenderPass.get();
}

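// Hands the current direct command list off to the queue, signals the fence with a new value so
// we can tell when the GPU has finished with it, and then acquires a fresh command list so that
// recording can continue immediately.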
void GrD3DGpu::submitDirectCommandList() {
    SkASSERT(fCurrentDirectCommandList);

    fCurrentDirectCommandList->submit(fQueue.Get());

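    // Track the submitted command list (via placement new into the deque) along with the fence
    // value that will be signaled once the GPU has finished executing it.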
    new (fOutstandingCommandLists.push_back()) OutstandingCommandList(
            std::move(fCurrentDirectCommandList), ++fCurrentFenceValue);

    SkDEBUGCODE(HRESULT hr = ) fQueue->Signal(fFence.Get(), fCurrentFenceValue);
    SkASSERT(SUCCEEDED(hr));

    fCurrentDirectCommandList = fResourceProvider.findOrCreateDirectCommandList();

    // This should be done after we have a new command list in case the freeing of any resources
    // held by a finished command list causes us to send a new command to the gpu (like changing
    // the resource state).
    this->checkForFinishedCommandLists();

    SkASSERT(fCurrentDirectCommandList);
}

void GrD3DGpu::checkForFinishedCommandLists() {
    uint64_t currentFenceValue = fFence->GetCompletedValue();

    // Iterate over all the outstanding command lists to see if any have finished. The command
    // lists are in order from oldest to newest, so we start at the front and check whether each
    // fence value is less than or equal to the last completed value. If so we pop it off and move
    // onto the next. Repeat until we find a command list that has not finished yet (and all
    // others afterwards are also guaranteed to not have finished).
    SkDeque::F2BIter iter(fOutstandingCommandLists);
    const OutstandingCommandList* curList = (const OutstandingCommandList*)iter.next();
    while (curList && curList->fFenceValue <= currentFenceValue) {
        curList = (const OutstandingCommandList*)iter.next();
        OutstandingCommandList* front = (OutstandingCommandList*)fOutstandingCommandLists.front();
        fResourceProvider.recycleDirectCommandList(std::move(front->fCommandList));
        // Since we used placement new we are responsible for calling the destructor manually.
        front->~OutstandingCommandList();
        fOutstandingCommandLists.pop_front();
    }
}

void GrD3DGpu::submit(GrOpsRenderPass* renderPass) {
    SkASSERT(fCachedOpsRenderPass.get() == renderPass);

    // TODO: actually submit something here
    fCachedOpsRenderPass.reset();
}

void GrD3DGpu::querySampleLocations(GrRenderTarget* rt, SkTArray<SkPoint>* sampleLocations) {
    // TODO
}

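// Creates a new GrD3DTexture (and render target, if requested) by translating the requested
// dimensions, format, and mip count into a D3D12_RESOURCE_DESC.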
sk_sp<GrTexture> GrD3DGpu::onCreateTexture(SkISize dimensions,
                                           const GrBackendFormat& format,
                                           GrRenderable renderable,
                                           int renderTargetSampleCnt,
                                           SkBudgeted budgeted,
                                           GrProtected isProtected,
                                           int mipLevelCount,
                                           uint32_t levelClearMask) {
    DXGI_FORMAT dxgiFormat;
    SkAssertResult(format.asDxgiFormat(&dxgiFormat));
    SkASSERT(!GrDxgiFormatIsCompressed(dxgiFormat));

    D3D12_RESOURCE_FLAGS usageFlags = D3D12_RESOURCE_FLAG_NONE;

    if (renderable == GrRenderable::kYes) {
        usageFlags |= D3D12_RESOURCE_FLAG_ALLOW_RENDER_TARGET;
    }

    // This desc refers to the texture that will be read by the client. Thus even if msaa is
    // requested, this describes the resolved texture. Therefore we always have samples set
    // to 1.
    SkASSERT(mipLevelCount > 0);
    D3D12_RESOURCE_DESC resourceDesc;
    resourceDesc.Dimension = D3D12_RESOURCE_DIMENSION_TEXTURE2D;
    // TODO: will use 4MB alignment for MSAA textures and 64KB for everything else
    // might want to manually set alignment to 4KB for smaller textures
    resourceDesc.Alignment = 0;
    resourceDesc.Width = dimensions.fWidth;
    resourceDesc.Height = dimensions.fHeight;
    resourceDesc.DepthOrArraySize = 1;
    resourceDesc.MipLevels = mipLevelCount;
    resourceDesc.Format = dxgiFormat;
    resourceDesc.SampleDesc.Count = 1;
    resourceDesc.SampleDesc.Quality = 0;  // quality levels are only supported for tiled resources
                                          // so ignore for now
    resourceDesc.Layout = D3D12_TEXTURE_LAYOUT_UNKNOWN;  // use driver-selected swizzle for now
    resourceDesc.Flags = usageFlags;

    GrMipMapsStatus mipMapsStatus =
            mipLevelCount > 1 ? GrMipMapsStatus::kDirty : GrMipMapsStatus::kNotAllocated;

    sk_sp<GrD3DTexture> tex;
    if (renderable == GrRenderable::kYes) {
        tex = GrD3DTextureRenderTarget::MakeNewTextureRenderTarget(
                this, budgeted, dimensions, renderTargetSampleCnt, resourceDesc, isProtected,
                mipMapsStatus);
    } else {
        tex = GrD3DTexture::MakeNewTexture(this, budgeted, dimensions, resourceDesc, isProtected,
                                           mipMapsStatus);
    }

    if (!tex) {
        return nullptr;
    }

    if (levelClearMask) {
        // TODO
    }
    return std::move(tex);
}

sk_sp<GrTexture> GrD3DGpu::onCreateCompressedTexture(SkISize dimensions,
                                                     const GrBackendFormat& format,
                                                     SkBudgeted budgeted,
                                                     GrMipMapped mipMapped,
                                                     GrProtected isProtected,
                                                     const void* data, size_t dataSize) {
    // TODO
    return nullptr;
}

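// Helpers that validate the GrD3DTextureResourceInfo backing a wrapped backend texture or render
// target before it is adopted below.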
static bool check_resource_info(const GrD3DCaps& caps,
                                const GrD3DTextureResourceInfo& info,
                                GrColorType colorType) {
    if (!info.fResource) {
        return false;
    }

    SkASSERTF(colorType == GrColorType::kUnknown ||
              caps.areColorTypeAndFormatCompatible(colorType,
                                                   GrBackendFormat::MakeDxgi(info.fFormat)),
              "Direct3D format/colorType mismatch - format %d colorType %d\n",
              info.fFormat, colorType);
    return true;
}

static bool check_tex_resource_info(const GrD3DCaps& caps, const GrD3DTextureResourceInfo& info) {
    if (!caps.isFormatTexturable(info.fFormat)) {
        return false;
    }
    return true;
}

static bool check_rt_resource_info(const GrD3DCaps& caps, const GrD3DTextureResourceInfo& info,
                                   int sampleCnt) {
    if (!caps.isFormatRenderable(info.fFormat, sampleCnt)) {
        return false;
    }
    return true;
}

sk_sp<GrTexture> GrD3DGpu::onWrapBackendTexture(const GrBackendTexture& tex, GrColorType colorType,
                                                GrWrapOwnership, GrWrapCacheable wrapType,
                                                GrIOType ioType) {
    GrD3DTextureResourceInfo textureInfo;
    if (!tex.getD3DTextureResourceInfo(&textureInfo)) {
        return nullptr;
    }

    if (!check_resource_info(this->d3dCaps(), textureInfo, colorType)) {
        return nullptr;
    }

    if (!check_tex_resource_info(this->d3dCaps(), textureInfo)) {
        return nullptr;
    }

    // TODO: support protected context
    if (tex.isProtected()) {
        return nullptr;
    }

    sk_sp<GrD3DResourceState> state = tex.getGrD3DResourceState();
    SkASSERT(state);
    return GrD3DTexture::MakeWrappedTexture(this, tex.dimensions(), wrapType, ioType, textureInfo,
                                            std::move(state));
}

sk_sp<GrTexture> GrD3DGpu::onWrapCompressedBackendTexture(const GrBackendTexture& tex,
                                                          GrWrapOwnership,
                                                          GrWrapCacheable wrapType) {
    GrD3DTextureResourceInfo textureInfo;
    if (!tex.getD3DTextureResourceInfo(&textureInfo)) {
        return nullptr;
    }

    if (!check_resource_info(this->d3dCaps(), textureInfo, GrColorType::kUnknown)) {
        return nullptr;
    }

    if (!check_tex_resource_info(this->d3dCaps(), textureInfo)) {
        return nullptr;
    }

    // TODO: support protected context
    if (tex.isProtected()) {
        return nullptr;
    }

    sk_sp<GrD3DResourceState> state = tex.getGrD3DResourceState();
    SkASSERT(state);
    return GrD3DTexture::MakeWrappedTexture(this, tex.dimensions(), wrapType, kRead_GrIOType,
                                            textureInfo, std::move(state));
}

sk_sp<GrTexture> GrD3DGpu::onWrapRenderableBackendTexture(const GrBackendTexture& tex,
                                                          int sampleCnt,
                                                          GrColorType colorType,
                                                          GrWrapOwnership ownership,
                                                          GrWrapCacheable cacheable) {
    GrD3DTextureResourceInfo textureInfo;
    if (!tex.getD3DTextureResourceInfo(&textureInfo)) {
        return nullptr;
    }

    if (!check_resource_info(this->d3dCaps(), textureInfo, colorType)) {
        return nullptr;
    }

    if (!check_tex_resource_info(this->d3dCaps(), textureInfo)) {
        return nullptr;
    }
    if (!check_rt_resource_info(this->d3dCaps(), textureInfo, sampleCnt)) {
        return nullptr;
    }

    // TODO: support protected context
    if (tex.isProtected()) {
        return nullptr;
    }

    sampleCnt = this->d3dCaps().getRenderTargetSampleCount(sampleCnt, textureInfo.fFormat);

    sk_sp<GrD3DResourceState> state = tex.getGrD3DResourceState();
    SkASSERT(state);

    return GrD3DTextureRenderTarget::MakeWrappedTextureRenderTarget(this, tex.dimensions(),
                                                                    sampleCnt, cacheable,
                                                                    textureInfo, std::move(state));
}

sk_sp<GrRenderTarget> GrD3DGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& rt,
                                                          GrColorType colorType) {
    // Currently the Direct3D backend does not support wrapping of msaa render targets directly. In
    // general this is not an issue since swapchain images in D3D are never multisampled. Thus if
    // you want a multisampled RT it is best to wrap the swapchain images and then let Skia handle
    // creating and owning the MSAA images.
    if (rt.sampleCnt() > 1) {
        return nullptr;
    }

    GrD3DTextureResourceInfo info;
    if (!rt.getD3DTextureResourceInfo(&info)) {
        return nullptr;
    }

    if (!check_resource_info(this->d3dCaps(), info, colorType)) {
        return nullptr;
    }

    if (!check_rt_resource_info(this->d3dCaps(), info, rt.sampleCnt())) {
        return nullptr;
    }

    // TODO: support protected context
    if (rt.isProtected()) {
        return nullptr;
    }

    sk_sp<GrD3DResourceState> state = rt.getGrD3DResourceState();

    sk_sp<GrD3DRenderTarget> tgt = GrD3DRenderTarget::MakeWrappedRenderTarget(
            this, rt.dimensions(), 1, info, std::move(state));

    // We don't allow the client to supply a premade stencil buffer. We always create one if needed.
    SkASSERT(!rt.stencilBits());
    if (tgt) {
        SkASSERT(tgt->canAttemptStencilAttachment());
    }

    return std::move(tgt);
}

sk_sp<GrRenderTarget> GrD3DGpu::onWrapBackendTextureAsRenderTarget(const GrBackendTexture& tex,
                                                                   int sampleCnt,
                                                                   GrColorType colorType) {
    GrD3DTextureResourceInfo textureInfo;
    if (!tex.getD3DTextureResourceInfo(&textureInfo)) {
        return nullptr;
    }
    if (!check_resource_info(this->d3dCaps(), textureInfo, colorType)) {
        return nullptr;
    }

    if (!check_rt_resource_info(this->d3dCaps(), textureInfo, sampleCnt)) {
        return nullptr;
    }

    // TODO: support protected context
    if (tex.isProtected()) {
        return nullptr;
    }

    sampleCnt = this->d3dCaps().getRenderTargetSampleCount(sampleCnt, textureInfo.fFormat);
    if (!sampleCnt) {
        return nullptr;
    }

    sk_sp<GrD3DResourceState> state = tex.getGrD3DResourceState();
    SkASSERT(state);

    return GrD3DRenderTarget::MakeWrappedRenderTarget(this, tex.dimensions(), sampleCnt,
                                                      textureInfo, std::move(state));
}

sk_sp<GrGpuBuffer> GrD3DGpu::onCreateBuffer(size_t sizeInBytes, GrGpuBufferType type,
                                            GrAccessPattern accessPattern, const void*) {
    // TODO
    return nullptr;
}

GrStencilAttachment* GrD3DGpu::createStencilAttachmentForRenderTarget(
        const GrRenderTarget* rt, int width, int height, int numStencilSamples) {
    // TODO
    return nullptr;
}

GrBackendTexture GrD3DGpu::onCreateBackendTexture(SkISize dimensions,
                                                  const GrBackendFormat& format,
                                                  GrRenderable,
                                                  GrMipMapped mipMapped,
                                                  GrProtected,
                                                  const BackendTextureData*) {
    // TODO
    return GrBackendTexture();
}

GrBackendTexture GrD3DGpu::onCreateCompressedBackendTexture(SkISize dimensions,
                                                            const GrBackendFormat& format,
                                                            GrMipMapped mipMapped,
                                                            GrProtected,
                                                            const BackendTextureData*) {
    // TODO
    return GrBackendTexture();
}

void GrD3DGpu::deleteBackendTexture(const GrBackendTexture& tex) {
    // TODO
}

bool GrD3DGpu::compile(const GrProgramDesc&, const GrProgramInfo&) {
    return false;
}

#if GR_TEST_UTILS
bool GrD3DGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
    // TODO
    return false;
}

GrBackendRenderTarget GrD3DGpu::createTestingOnlyBackendRenderTarget(int w, int h,
                                                                     GrColorType colorType) {
    // TODO
    return GrBackendRenderTarget();
}

void GrD3DGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget&) {}
#endif