//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{

namespace
{

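// Translates a GL index type enum into the equivalent Vulkan index type. Vulkan has no native
// 8-bit index type, so GL_UNSIGNED_BYTE is not handled here; drawElements() reports it as
// unimplemented until index translation is added.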
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

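// Indices into the descriptor pool sizes set up in ContextVk::initialize(): one pool entry for
// uniform buffers and one for combined image samplers (textures).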
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool = 1,
};

}  // anonymous namespace

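// Note: the streaming vertex buffer below is a 1 MB dynamic buffer that setupDraw() uses to
// stream client-side vertex attribute data (see VertexArrayVk::streamVertexData).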
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, 1024 * 1024)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
    mStreamingVertexData.destroy(device);
    mLineLoopHandler.destroy(device);
}

gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

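    // Initialize the descriptor pool used to allocate this context's descriptor sets (see
    // getDescriptorPool()). The descriptor counts and maxSets below are placeholder values; see
    // the TODO on maxSets.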
    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

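// Builds (or fetches from the renderer) a graphics pipeline matching the current GL state:
// primitive topology, vertex input layout, render pass and program shaders, all captured in
// mPipelineDesc.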
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

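// Common per-draw setup shared by the draw entry points: (re)creates the pipeline if the draw mode
// or GL state changed, starts or continues the framebuffer's render pass, records read
// dependencies for the vertex array buffers and bound textures, streams client-side vertex data,
// and binds the pipeline, vertex buffers and descriptor sets on the returned command buffer.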
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               int firstVertex,
                               int lastVertex,
                               ResourceVk *elementArrayBufferOverride,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk = vk::GetImpl(programGL);
    const gl::VertexArray *vao = state.getVertexArray();
    VertexArrayVk *vkVAO = vk::GetImpl(vao);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = vk::GetImpl(drawFBO);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *renderNode = nullptr;
    ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));

    if (!renderNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty = true;
        ANGLE_TRY(renderNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = renderNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty || elementArrayBufferOverride != nullptr)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
                                      elementArrayBufferOverride, queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(renderNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

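// Note: Vulkan's primitive topologies do not include line loops, so GL_LINE_LOOP is emulated by
// generating an index buffer that closes the loop (see mLineLoopHandler).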
gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, nullptr,
                        &commandBuffer));

    if (mode == GL_LINE_LOOP)
    {
        ANGLE_TRY(mLineLoopHandler.createIndexBuffer(this, first, count));
        mLineLoopHandler.bindIndexBuffer(VK_INDEX_TYPE_UINT32, &commandBuffer);
        ANGLE_TRY(mLineLoopHandler.draw(count, commandBuffer));
    }
    else
    {
        commandBuffer->draw(count, 1, first, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

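// Note: only a bound element array buffer with a zero offset is supported so far; client-side
// index data, buffer offsets and GL_UNSIGNED_BYTE indices are reported as unimplemented below.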
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
    vk::CommandBuffer *commandBuffer = nullptr;

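    // Line loops are emulated: the element array buffer is converted into a dedicated index
    // buffer that closes the loop, which is why drawIndexed() below consumes count + 1 indices.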
    if (mode == GL_LINE_LOOP)
    {
        ANGLE_TRY(mLineLoopHandler.createIndexBufferFromElementArrayBuffer(
            this, elementArrayBufferVk, GetVkIndexType(type), count));

        // TODO(fjhenigman): calculate the index range and pass to setupDraw()
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0,
                            mLineLoopHandler.getLineLoopBufferResource(), &commandBuffer));

        mLineLoopHandler.bindIndexBuffer(GetVkIndexType(type), &commandBuffer);
        commandBuffer->drawIndexed(count + 1, 1, 0, 0, 0);
    }
    else
    {
        // TODO(fjhenigman): calculate the index range and pass to setupDraw()
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0, nullptr, &commandBuffer));

        if (indices)
        {
            // TODO(jmadill): Buffer offsets and immediate data.
            UNIMPLEMENTED();
            return gl::InternalError()
                   << "Only zero-offset index buffers are currently implemented.";
        }

        if (type == GL_UNSIGNED_BYTE)
        {
            // TODO(jmadill): Index translation.
            UNIMPLEMENTED();
            return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
        }

        commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0,
                                       GetVkIndexType(type));
        commandBuffer->drawIndexed(count, 1, 0, 0, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

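// Translates GL dirty bits into Vulkan state: most rasterizer, blend, viewport and scissor state
// is folded into mPipelineDesc (used by initPipeline() to obtain a pipeline), clear values are
// stashed for later clears, and vertex-array/texture changes mark the dirty flags consumed by
// setupDraw(). Any dirty bit also invalidates the current pipeline.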
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

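// Forces the next setupDraw() call to rebuild the pipeline via initPipeline(). Called whenever any
// GL state that feeds mPipelineDesc may have changed.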
void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

}  // namespace rx