//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/formatutilsvk.h"

namespace rx
{

namespace
{

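// Maps a GL index element type onto the matching Vulkan index type. Only 16-bit and 32-bit
// indices are handled here; 8-bit indices need translation (see drawElements below).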
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

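// Indices into the descriptor pool size array set up in ContextVk::initialize().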
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool       = 1,
};

}  // anonymous namespace

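// The constructor fills the cached pipeline creation structures with default values. Fields that
// depend on GL state (shader modules, vertex input, topology, layout, render pass) are filled
// out at draw time.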
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state), mRenderer(renderer), mCurrentDrawMode(GL_NONE)
{
    // The shader module handles are filled out at draw time.
    mCurrentShaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    mCurrentShaderStages[0].pNext = nullptr;
    mCurrentShaderStages[0].flags = 0;
    mCurrentShaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
    mCurrentShaderStages[0].module = VK_NULL_HANDLE;
    mCurrentShaderStages[0].pName = "main";
    mCurrentShaderStages[0].pSpecializationInfo = nullptr;

    mCurrentShaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    mCurrentShaderStages[1].pNext = nullptr;
    mCurrentShaderStages[1].flags = 0;
    mCurrentShaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    mCurrentShaderStages[1].module = VK_NULL_HANDLE;
    mCurrentShaderStages[1].pName = "main";
    mCurrentShaderStages[1].pSpecializationInfo = nullptr;

    // The binding descriptions are filled in at draw time.
    mCurrentVertexInputState.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    mCurrentVertexInputState.pNext = nullptr;
    mCurrentVertexInputState.flags = 0;
    mCurrentVertexInputState.vertexBindingDescriptionCount = 0;
    mCurrentVertexInputState.pVertexBindingDescriptions = nullptr;
    mCurrentVertexInputState.vertexAttributeDescriptionCount = 0;
    mCurrentVertexInputState.pVertexAttributeDescriptions = nullptr;

    // Primitive topology is filled in at draw time.
    mCurrentInputAssemblyState.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    mCurrentInputAssemblyState.pNext = nullptr;
    mCurrentInputAssemblyState.flags = 0;
    mCurrentInputAssemblyState.topology = gl_vk::GetPrimitiveTopology(mCurrentDrawMode);
    mCurrentInputAssemblyState.primitiveRestartEnable = VK_FALSE;

    // Set initial viewport and scissor state.
    mCurrentViewportVk.x = 0.0f;
    mCurrentViewportVk.y = 0.0f;
    mCurrentViewportVk.width = 0.0f;
    mCurrentViewportVk.height = 0.0f;
    mCurrentViewportVk.minDepth = 0.0f;
    mCurrentViewportVk.maxDepth = 1.0f;

    mCurrentScissorVk.offset.x = 0;
    mCurrentScissorVk.offset.y = 0;
    mCurrentScissorVk.extent.width = 0u;
    mCurrentScissorVk.extent.height = 0u;

    mCurrentViewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    mCurrentViewportState.pNext = nullptr;
    mCurrentViewportState.flags = 0;
    mCurrentViewportState.viewportCount = 1;
    mCurrentViewportState.pViewports = &mCurrentViewportVk;
    mCurrentViewportState.scissorCount = 1;
    mCurrentViewportState.pScissors = &mCurrentScissorVk;

    // Set initial rasterizer state.
    // TODO(jmadill): Extra rasterizer state features.
    mCurrentRasterState.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    mCurrentRasterState.pNext = nullptr;
    mCurrentRasterState.flags = 0;
    mCurrentRasterState.depthClampEnable = VK_FALSE;
    mCurrentRasterState.rasterizerDiscardEnable = VK_FALSE;
    mCurrentRasterState.polygonMode = VK_POLYGON_MODE_FILL;
    mCurrentRasterState.cullMode = VK_CULL_MODE_NONE;
    mCurrentRasterState.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
    mCurrentRasterState.depthBiasEnable = VK_FALSE;
    mCurrentRasterState.depthBiasConstantFactor = 0.0f;
    mCurrentRasterState.depthBiasClamp = 0.0f;
    mCurrentRasterState.depthBiasSlopeFactor = 0.0f;
    mCurrentRasterState.lineWidth = 1.0f;

    // Initialize a dummy multisample state.
    // TODO(jmadill): Multisample state.
    mCurrentMultisampleState.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    mCurrentMultisampleState.pNext = nullptr;
    mCurrentMultisampleState.flags = 0;
    mCurrentMultisampleState.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    mCurrentMultisampleState.sampleShadingEnable = VK_FALSE;
    mCurrentMultisampleState.minSampleShading = 0.0f;
    mCurrentMultisampleState.pSampleMask = nullptr;
    mCurrentMultisampleState.alphaToCoverageEnable = VK_FALSE;
    mCurrentMultisampleState.alphaToOneEnable = VK_FALSE;

    // TODO(jmadill): Depth/stencil state.

    // Initialize a dummy MRT blend state.
    // TODO(jmadill): Blend state/MRT.
    mCurrentBlendAttachmentState.blendEnable = VK_FALSE;
    mCurrentBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD;
    mCurrentBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD;
    mCurrentBlendAttachmentState.colorWriteMask =
        (VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT |
         VK_COLOR_COMPONENT_A_BIT);

    mCurrentBlendState.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    mCurrentBlendState.pNext = nullptr;
    mCurrentBlendState.flags = 0;
    mCurrentBlendState.logicOpEnable = VK_FALSE;
    mCurrentBlendState.logicOp = VK_LOGIC_OP_CLEAR;
    mCurrentBlendState.attachmentCount = 1;
    mCurrentBlendState.pAttachments = &mCurrentBlendAttachmentState;
    mCurrentBlendState.blendConstants[0] = 0.0f;
    mCurrentBlendState.blendConstants[1] = 0.0f;
    mCurrentBlendState.blendConstants[2] = 0.0f;
    mCurrentBlendState.blendConstants[3] = 0.0f;

    // TODO(jmadill): Dynamic state.

    // The layout and render pass are filled out at draw time.
    mCurrentPipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    mCurrentPipelineInfo.pNext = nullptr;
    mCurrentPipelineInfo.flags = 0;
    mCurrentPipelineInfo.stageCount = 2;
    mCurrentPipelineInfo.pStages = mCurrentShaderStages;
    mCurrentPipelineInfo.pVertexInputState = &mCurrentVertexInputState;
    mCurrentPipelineInfo.pInputAssemblyState = &mCurrentInputAssemblyState;
    mCurrentPipelineInfo.pTessellationState = nullptr;
    mCurrentPipelineInfo.pViewportState = &mCurrentViewportState;
    mCurrentPipelineInfo.pRasterizationState = &mCurrentRasterState;
    mCurrentPipelineInfo.pMultisampleState = &mCurrentMultisampleState;
    mCurrentPipelineInfo.pDepthStencilState = nullptr;
    mCurrentPipelineInfo.pColorBlendState = &mCurrentBlendState;
    mCurrentPipelineInfo.pDynamicState = nullptr;
    mCurrentPipelineInfo.layout = VK_NULL_HANDLE;
    mCurrentPipelineInfo.renderPass = VK_NULL_HANDLE;
    mCurrentPipelineInfo.subpass = 0;
    mCurrentPipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
    mCurrentPipelineInfo.basePipelineIndex = 0;
}

ContextVk::~ContextVk()
{
    invalidateCurrentPipeline();
}

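// Called when the front-end context is destroyed; releases the Vulkan objects owned directly by
// this context (currently just the descriptor pool).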
void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
}

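// Creates the descriptor pool that backs the uniform-buffer and texture descriptor sets.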
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    return gl::NoError();
}

gl::Error ContextVk::flush()
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish()
{
    // TODO(jmadill): Implement finish.
    // UNIMPLEMENTED();
    return gl::NoError();
}

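// Builds a new graphics pipeline from the current GL state: vertex input descriptions from the
// vertex array, the render pass from the draw framebuffer, and the pipeline layout from the
// bound program.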
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline.valid());

    VkDevice device = mRenderer->getDevice();
    const auto &state = mState.getState();
    const auto &programGL = state.getProgram();
    const auto &vao = state.getVertexArray();
    const auto &programVk = GetImplAs<ProgramVk>(programGL);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = GetImplAs<FramebufferVk>(drawFBO);
    VertexArrayVk *vkVAO = GetImplAs<VertexArrayVk>(vao);

    // Ensure the attribs and bindings are updated.
    vkVAO->updateVertexDescriptions(context);

    const auto &vertexBindings = vkVAO->getVertexBindingDescs();
    const auto &vertexAttribs = vkVAO->getVertexAttribDescs();

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    mCurrentVertexInputState.vertexBindingDescriptionCount =
        static_cast<uint32_t>(vertexBindings.size());
    mCurrentVertexInputState.pVertexBindingDescriptions = vertexBindings.data();
    mCurrentVertexInputState.vertexAttributeDescriptionCount =
        static_cast<uint32_t>(vertexAttribs.size());
    mCurrentVertexInputState.pVertexAttributeDescriptions = vertexAttribs.data();

    mCurrentInputAssemblyState.topology = gl_vk::GetPrimitiveTopology(mCurrentDrawMode);

    vk::RenderPass *renderPass = nullptr;
    ANGLE_TRY_RESULT(vkFBO->getRenderPass(context, device), renderPass);
    ASSERT(renderPass && renderPass->valid());

    const vk::PipelineLayout &pipelineLayout = programVk->getPipelineLayout();
    ASSERT(pipelineLayout.valid());

    mCurrentPipelineInfo.layout = pipelineLayout.getHandle();
    mCurrentPipelineInfo.renderPass = renderPass->getHandle();

    vk::Pipeline newPipeline;
    ANGLE_TRY(newPipeline.initGraphics(device, mCurrentPipelineInfo));

    // TODO(jmadill): Don't dispose the current pipeline immediately, it could be in use.
    mCurrentPipeline.retain(device, std::move(newPipeline));

    return gl::NoError();
}

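// Shared setup for draw calls: rebuilds the pipeline if it has been invalidated (by a primitive
// mode change or a GL state change), starts the render pass, binds the pipeline, vertex buffers
// and descriptor sets, and updates the queue serials used for resource lifetime tracking.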
gl::Error ContextVk::setupDraw(const gl::Context *context, GLenum mode)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline.valid())
    {
        ANGLE_TRY(initPipeline(context));
        ASSERT(mCurrentPipeline.valid());
    }

    VkDevice device = mRenderer->getDevice();
    const auto &state = mState.getState();
    const auto &programGL = state.getProgram();
    ProgramVk *programVk = GetImplAs<ProgramVk>(programGL);
    const auto &vao = state.getVertexArray();
    VertexArrayVk *vkVAO = GetImplAs<VertexArrayVk>(vao);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = GetImplAs<FramebufferVk>(drawFBO);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib = programGL->getState().getMaxActiveAttribLocation();

    // Process vertex attributes. Assume zero offsets for now.
    // TODO(jmadill): Offset handling.
    const std::vector<VkBuffer> &vertexHandles = vkVAO->getCurrentVertexBufferHandlesCache();
    angle::MemoryBuffer *zeroBuf = nullptr;
    ANGLE_TRY(context->getZeroFilledBuffer(maxAttrib * sizeof(VkDeviceSize), &zeroBuf));

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(mRenderer->getStartedCommandBuffer(&commandBuffer));
    ANGLE_TRY(vkFBO->ensureInRenderPass(context, device, commandBuffer, queueSerial, state));

    commandBuffer->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline);
    commandBuffer->bindVertexBuffers(0, maxAttrib, vertexHandles.data(),
                                     reinterpret_cast<const VkDeviceSize *>(zeroBuf->data()));

    // TODO(jmadill): the queue serial should be bound to the pipeline.
    setQueueSerial(queueSerial);
    vkVAO->updateCurrentBufferSerials(programGL->getActiveAttribLocationsMask(), queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ContextVk *contextVk = GetImplAs<ContextVk>(context);
    ANGLE_TRY(programVk->updateUniforms(contextVk));

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    VkDescriptorSet uniformDescriptorSet = programVk->getDescriptorSet();
    if (uniformDescriptorSet != VK_NULL_HANDLE)
    {
        const vk::PipelineLayout &pipelineLayout = programVk->getPipelineLayout();
        commandBuffer->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1,
                                          &uniformDescriptorSet, 0, nullptr);
    }

    return gl::NoError();
}

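// Non-indexed draw; all shared state setup is handled by setupDraw().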
gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    ANGLE_TRY(setupDraw(context, mode));

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(mRenderer->getStartedCommandBuffer(&commandBuffer));

    commandBuffer->draw(count, 1, first, 0);
    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

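// Indexed draw; only a bound element array buffer with zero offset and 16-bit or 32-bit indices
// is handled so far.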
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    ANGLE_TRY(setupDraw(context, mode));

    if (indices)
    {
        // TODO(jmadill): Buffer offsets and immediate data.
        UNIMPLEMENTED();
        return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
    }

    if (type == GL_UNSIGNED_BYTE)
    {
        // TODO(jmadill): Index translation.
        UNIMPLEMENTED();
        return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(mRenderer->getStartedCommandBuffer(&commandBuffer));

    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = GetImplAs<BufferVk>(elementArrayBuffer);

    commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
    commandBuffer->drawIndexed(count, 1, 0, 0, 0);

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

vk::Error ContextVk::getStartedCommandBuffer(vk::CommandBuffer **commandBufferOut)
{
    return mRenderer->getStartedCommandBuffer(commandBufferOut);
}

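// Stamps this context with the renderer's current queue serial, then submits the command buffer
// through the renderer.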
vk::Error ContextVk::submitCommands(vk::CommandBuffer *commandBuffer)
{
    setQueueSerial(mRenderer->getCurrentQueueSerial());
    ANGLE_TRY(mRenderer->submitCommandBuffer(commandBuffer));
    return vk::NoError();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawArraysIndirect hasn't been implemented for the Vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for the Vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

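// Handles GL state-change notifications. Any dirty bit currently invalidates the cached
// pipeline; most individual bits are still unimplemented and only log a warning.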
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                WARN() << "DIRTY_BIT_SCISSOR_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                WARN() << "DIRTY_BIT_SCISSOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
            {
                const gl::Rectangle &viewportGL = glState.getViewport();
                mCurrentViewportVk.x = static_cast<float>(viewportGL.x);
                mCurrentViewportVk.y = static_cast<float>(viewportGL.y);
                mCurrentViewportVk.width = static_cast<float>(viewportGL.width);
                mCurrentViewportVk.height = static_cast<float>(viewportGL.height);
                mCurrentViewportVk.minDepth = glState.getNearPlane();
                mCurrentViewportVk.maxDepth = glState.getFarPlane();

                // TODO(jmadill): Scissor.
                mCurrentScissorVk.offset.x = viewportGL.x;
                mCurrentScissorVk.offset.y = viewportGL.y;
                mCurrentScissorVk.extent.width = viewportGL.width;
                mCurrentScissorVk.extent.height = viewportGL.height;
                break;
            }
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                WARN() << "DIRTY_BIT_BLEND_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                WARN() << "DIRTY_BIT_BLEND_COLOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                WARN() << "DIRTY_BIT_BLEND_FUNCS unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                WARN() << "DIRTY_BIT_BLEND_EQUATIONS unimplemented";
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_WORD_0:
            case gl::State::DIRTY_BIT_SAMPLE_MASK_WORD_0 + 1:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_WORD unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mCurrentRasterState.cullMode = gl_vk::GetCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mCurrentRasterState.frontFace =
                    gl_vk::GetFrontFace(glState.getRasterizerState().frontFace);
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mCurrentRasterState.lineWidth = glState.getLineWidth();
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                WARN() << "DIRTY_BIT_CLEAR_COLOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                WARN() << "DIRTY_BIT_CLEAR_DEPTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                WARN() << "DIRTY_BIT_CLEAR_STENCIL unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_ALIGNMENT:
                WARN() << "DIRTY_BIT_UNPACK_ALIGNMENT unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_ROW_LENGTH:
                WARN() << "DIRTY_BIT_UNPACK_ROW_LENGTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_IMAGE_HEIGHT:
                WARN() << "DIRTY_BIT_UNPACK_IMAGE_HEIGHT unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_SKIP_IMAGES:
                WARN() << "DIRTY_BIT_UNPACK_SKIP_IMAGES unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_SKIP_ROWS:
                WARN() << "DIRTY_BIT_UNPACK_SKIP_ROWS unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_SKIP_PIXELS:
                WARN() << "DIRTY_BIT_UNPACK_SKIP_PIXELS unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_ALIGNMENT:
                WARN() << "DIRTY_BIT_PACK_ALIGNMENT unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_REVERSE_ROW_ORDER:
                WARN() << "DIRTY_BIT_PACK_REVERSE_ROW_ORDER unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_ROW_LENGTH:
                WARN() << "DIRTY_BIT_PACK_ROW_LENGTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_SKIP_ROWS:
                WARN() << "DIRTY_BIT_PACK_SKIP_ROWS unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_SKIP_PIXELS:
                WARN() << "DIRTY_BIT_PACK_SKIP_PIXELS unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                WARN() << "DIRTY_BIT_VERTEX_ARRAY_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                // { vertex, fragment }
                ProgramVk *programVk = GetImplAs<ProgramVk>(glState.getProgram());
                mCurrentShaderStages[0].module = programVk->getLinkedVertexModule().getHandle();
                mCurrentShaderStages[1].module = programVk->getLinkedFragmentModule().getHandle();

                // Also invalidate the vertex descriptions cache in the Vertex Array.
                VertexArrayVk *vaoVk = GetImplAs<VertexArrayVk>(glState.getVertexArray());
                vaoVk->invalidateVertexDescriptions();
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                WARN() << "DIRTY_BIT_TEXTURE_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                WARN() << "DIRTY_BIT_SAMPLER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            default:
                if (dirtyBit >= gl::State::DIRTY_BIT_CURRENT_VALUE_0 &&
                    dirtyBit < gl::State::DIRTY_BIT_CURRENT_VALUE_MAX)
                {
                    WARN() << "DIRTY_BIT_CURRENT_VALUE unimplemented";
                }
                else
                {
                    UNREACHABLE();
                }
                break;
        }
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer()
{
    return new RenderbufferVk();
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

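// Hands the current pipeline to the renderer for disposal (immediately, or as deferred garbage
// when it may still be in flight); the next draw call then rebuilds it in initPipeline().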
// TODO(jmadill): Use pipeline cache.
void ContextVk::invalidateCurrentPipeline()
{
    mRenderer->enqueueGarbageOrDeleteNow(*this, mCurrentPipeline);
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

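// Exposes the descriptor pool created in initialize() to other Vulkan backend objects.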
vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

}  // namespace rx