/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkGpu_DEFINED
#define GrVkGpu_DEFINED

#include "GrGpu.h"
#include "GrGpuFactory.h"
#include "GrVkCaps.h"
#include "GrVkIndexBuffer.h"
#include "GrVkProgram.h"
#include "GrVkResourceProvider.h"
#include "GrVkVertexBuffer.h"
#include "GrVkUtil.h"

#include "shaderc/shaderc.h"
#include "vulkan/vulkan.h"

class GrPipeline;
class GrNonInstancedVertices;

class GrVkBufferImpl;
class GrVkCommandBuffer;
class GrVkPipeline;
class GrVkRenderPass;
class GrVkTexture;
struct GrVkInterface;

class GrVkGpu : public GrGpu {
public:
    // Currently passing in the inst so that we can properly delete it when we are done.
    // Normally this would be done by the client.
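    //
    // For context, a hypothetical sketch (not code from this file) of the client-side setup
    // that would normally produce the handles passed below; the *CreateInfo structs and
    // queueFamilyIndex are assumed/elided:
    //
    //     VkInstance inst;
    //     vkCreateInstance(&instanceCreateInfo, nullptr, &inst);
    //     VkDevice device;
    //     vkCreateDevice(physDev, &deviceCreateInfo, nullptr, &device);
    //     VkQueue queue;
    //     vkGetDeviceQueue(device, queueFamilyIndex, 0, &queue);
    //     VkCommandPool cmdPool;
    //     vkCreateCommandPool(device, &cmdPoolCreateInfo, nullptr, &cmdPool);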
    GrVkGpu(GrContext* context, const GrContextOptions& options,
            VkPhysicalDevice physDev, VkDevice device, VkQueue queue, VkCommandPool cmdPool,
            VkInstance inst);
    ~GrVkGpu() override;

    const GrVkInterface* vkInterface() const { return fInterface.get(); }
    const GrVkCaps& vkCaps() const { return *fVkCaps; }

    VkDevice device() const { return fDevice; }
    VkQueue queue() const { return fQueue; }
    VkCommandPool cmdPool() const { return fCmdPool; }
    VkPhysicalDeviceMemoryProperties physicalDeviceMemoryProperties() const {
        return fPhysDevMemProps;
    }

    GrVkResourceProvider& resourceProvider() { return fResourceProvider; }

    enum SyncQueue {
        kForce_SyncQueue,
        kSkip_SyncQueue
    };

    bool onGetReadPixelsInfo(GrSurface* srcSurface, int readWidth, int readHeight, size_t rowBytes,
                             GrPixelConfig readConfig, DrawPreference*,
                             ReadPixelTempDrawInfo*) override;

    bool onGetWritePixelsInfo(GrSurface* dstSurface, int width, int height,
                              GrPixelConfig srcConfig, DrawPreference*,
                              WritePixelTempDrawInfo*) override;

    void buildProgramDesc(GrProgramDesc*, const GrPrimitiveProcessor&,
                          const GrPipeline&) const override;

    void discard(GrRenderTarget*) override {
        SkDebugf("discard not yet implemented for Vulkan\n");
    }

    bool onCopySurface(GrSurface* dst,
                       GrSurface* src,
                       const SkIRect& srcRect,
                       const SkIPoint& dstPoint) override;

    bool initCopySurfaceDstDesc(const GrSurface* src, GrSurfaceDesc* desc) const override {
        SkDebugf("initCopySurfaceDstDesc not yet implemented for Vulkan\n");
        return false;
    }

    void xferBarrier(GrRenderTarget*, GrXferBarrierType) override {}

    GrBackendObject createTestingOnlyBackendTexture(void* pixels, int w, int h,
                                                    GrPixelConfig config) override;
    bool isTestingOnlyBackendTexture(GrBackendObject id) const override;
    void deleteTestingOnlyBackendTexture(GrBackendObject id, bool abandonTexture) override;

    GrStencilAttachment* createStencilAttachmentForRenderTarget(const GrRenderTarget*,
                                                                int width,
                                                                int height) override;

    void clearStencil(GrRenderTarget* target) override {
        SkDebugf("clearStencil not yet implemented for Vulkan\n");
    }

    void drawDebugWireRect(GrRenderTarget*, const SkIRect&, GrColor) override {
        SkDebugf("drawDebugWireRect not yet implemented for Vulkan\n");
    }

    void addMemoryBarrier(VkPipelineStageFlags srcStageMask,
                          VkPipelineStageFlags dstStageMask,
                          bool byRegion,
                          VkMemoryBarrier* barrier) const;
    void addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                bool byRegion,
                                VkBufferMemoryBarrier* barrier) const;
    void addImageMemoryBarrier(VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion,
                               VkImageMemoryBarrier* barrier) const;
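    // A minimal sketch of how a caller might fill out the barrier passed to
    // addImageMemoryBarrier() above; the layouts and access masks are assumptions for
    // illustration, and the helper is expected to record a vkCmdPipelineBarrier:
    //
    //     VkImageMemoryBarrier barrier = {};
    //     barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    //     barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    //     barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    //     barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    //     barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    //     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    //     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    //     barrier.image = image;  // the VkImage being transitioned (assumed)
    //     barrier.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
    //     gpu->addImageMemoryBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
    //                                VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
    //                                false, &barrier);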

    shaderc_compiler_t shadercCompiler() const {
        return fCompiler;
    }

    void finishDrawTarget() override;

private:
    void onResetContext(uint32_t resetBits) override {
        SkDebugf("onResetContext not yet implemented for Vulkan\n");
    }

    GrTexture* onCreateTexture(const GrSurfaceDesc& desc, GrGpuResource::LifeCycle,
                               const void* srcData, size_t rowBytes) override;

    GrTexture* onCreateCompressedTexture(const GrSurfaceDesc& desc, GrGpuResource::LifeCycle,
                                         const void* srcData) override {
        SkDebugf("onCreateCompressedTexture not yet implemented for Vulkan\n");
        return NULL;
    }

    GrTexture* onWrapBackendTexture(const GrBackendTextureDesc&, GrWrapOwnership) override;

    GrRenderTarget* onWrapBackendRenderTarget(const GrBackendRenderTargetDesc&,
                                              GrWrapOwnership) override;

    GrRenderTarget* onWrapBackendTextureAsRenderTarget(const GrBackendTextureDesc&,
                                                       GrWrapOwnership) override {
        SkDebugf("onWrapBackendTextureAsRenderTarget not yet implemented for Vulkan\n");
        return NULL;
    }

    GrVertexBuffer* onCreateVertexBuffer(size_t size, bool dynamic) override;
    GrIndexBuffer* onCreateIndexBuffer(size_t size, bool dynamic) override;
    GrTransferBuffer* onCreateTransferBuffer(size_t size, TransferType type) override;

    void onClear(GrRenderTarget*, const SkIRect& rect, GrColor color) override;

    void onClearStencilClip(GrRenderTarget*, const SkIRect& rect, bool insideClip) override {
        SkDebugf("onClearStencilClip not yet implemented for Vulkan\n");
    }

    void onDraw(const DrawArgs&, const GrNonInstancedVertices&) override;

    bool onReadPixels(GrSurface* surface,
                      int left, int top, int width, int height,
                      GrPixelConfig,
                      void* buffer,
                      size_t rowBytes) override;

    bool onWritePixels(GrSurface* surface,
                       int left, int top, int width, int height,
                       GrPixelConfig config, const void* buffer, size_t rowBytes) override;

    bool onTransferPixels(GrSurface*,
                          int left, int top, int width, int height,
                          GrPixelConfig config, GrTransferBuffer* buffer,
                          size_t offset, size_t rowBytes) override {
        SkDebugf("onTransferPixels not yet implemented for Vulkan\n");
        return false;
    }

    void onResolveRenderTarget(GrRenderTarget* target) override {
        SkDebugf("onResolveRenderTarget not yet implemented for Vulkan\n");
    }

    // Bind vertex and index buffers
    void bindGeometry(const GrPrimitiveProcessor&, const GrNonInstancedVertices&);

    // Ends and submits the current command buffer to the queue and then creates a new command
    // buffer and begins it. If sync is set to kForce_SyncQueue, the function will wait for all
    // work in the queue to finish before returning.
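    //
    // A rough sketch (an assumption, not the actual implementation) of what the submit step
    // can reduce to in raw Vulkan, where cmdBuffer is the handle wrapped by fCurrentCmdBuffer:
    //
    //     vkEndCommandBuffer(cmdBuffer);
    //     VkSubmitInfo submitInfo = {};
    //     submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    //     submitInfo.commandBufferCount = 1;
    //     submitInfo.pCommandBuffers = &cmdBuffer;
    //     vkQueueSubmit(fQueue, 1, &submitInfo, VK_NULL_HANDLE);
    //     if (kForce_SyncQueue == sync) {
    //         vkQueueWaitIdle(fQueue);  // block until all queued work has finished
    //     }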
    void submitCommandBuffer(SyncQueue sync);

    void copySurfaceAsCopyImage(GrSurface* dst,
                                GrSurface* src,
                                const SkIRect& srcRect,
                                const SkIPoint& dstPoint);
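    // The "copy image" path presumably maps onto vkCmdCopyImage; a hypothetical region setup
    // (field values are illustrative assumptions, not taken from this repo) could look like:
    //
    //     VkImageCopy region = {};
    //     region.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    //     region.srcOffset = { srcRect.fLeft, srcRect.fTop, 0 };
    //     region.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    //     region.dstOffset = { dstPoint.fX, dstPoint.fY, 0 };
    //     region.extent = { (uint32_t)srcRect.width(), (uint32_t)srcRect.height(), 1 };
    //     vkCmdCopyImage(cmdBuffer,
    //                    srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
    //                    dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
    //                    1, &region);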

    void copySurfaceAsDraw(GrSurface* dst,
                           GrSurface* src,
                           const SkIRect& srcRect,
                           const SkIPoint& dstPoint);

    // helper for onCreateTexture and writeTexturePixels
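    //
    // One common Vulkan upload pattern (a sketch under assumptions, not necessarily what this
    // implementation does) is to memcpy the rows into a mapped staging buffer and then record
    // a buffer-to-image copy:
    //
    //     void* mapPtr;
    //     vkMapMemory(fDevice, stagingMemory, 0, VK_WHOLE_SIZE, 0, &mapPtr);
    //     // ... copy 'height' rows into mapPtr, honoring the caller's rowBytes stride ...
    //     vkUnmapMemory(fDevice, stagingMemory);
    //
    //     VkBufferImageCopy region = {};
    //     region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    //     region.imageOffset = { left, top, 0 };
    //     region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
    //     vkCmdCopyBufferToImage(cmdBuffer, stagingBuffer, dstImage,  // dstImage: the
    //                            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // texture's VkImage
    //                            1, &region);                          // (assumed)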
    bool uploadTexData(GrVkTexture* tex,
                       int left, int top, int width, int height,
                       GrPixelConfig dataConfig,
                       const void* data,
                       size_t rowBytes);

    SkAutoTUnref<const GrVkInterface> fInterface;
    SkAutoTUnref<GrVkCaps>            fVkCaps;
    VkPhysicalDeviceMemoryProperties  fPhysDevMemProps;
    VkDevice                          fDevice;
    VkQueue                           fQueue;    // for now, one queue
    VkCommandPool                     fCmdPool;
    GrVkCommandBuffer*                fCurrentCmdBuffer;
    GrVkResourceProvider              fResourceProvider;

    // Shaderc compiler used for compiling GLSL into SPIR-V. We only want to create the compiler
    // once, since there is significant overhead in the first compile of any compiler.
    shaderc_compiler_t fCompiler;
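    // For reference, a minimal sketch of compiling a GLSL string with the shaderc C API (the
    // source string, shader kind, and option handling here are illustrative assumptions):
    //
    //     shaderc_compilation_result_t result = shaderc_compile_into_spv(
    //             fCompiler, glslSource, strlen(glslSource),
    //             shaderc_glsl_vertex_shader, "shader.vert", "main", nullptr);
    //     if (shaderc_result_get_compilation_status(result) ==
    //             shaderc_compilation_status_success) {
    //         const char* spirv = shaderc_result_get_bytes(result);
    //         size_t spirvSize = shaderc_result_get_length(result);
    //         // ... hand the SPIR-V words to vkCreateShaderModule ...
    //     }
    //     shaderc_result_release(result);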

    // This is only for our current testing and building. The client should be holding on to the
    // VkInstance.
    VkInstance fVkInstance;

    typedef GrGpu INHERITED;
};

#endif