blob: b0d3816b58a04a7349763a150598fba93d8dca91 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ContextVk.cpp:
7// Implements the class methods for ContextVk.
8//
9
10#include "libANGLE/renderer/vulkan/ContextVk.h"
11
Jamie Madill20e005b2017-04-07 14:19:22 -040012#include "common/bitset_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040013#include "common/debug.h"
Jamie Madillbd159f02017-10-09 19:39:06 -040014#include "libANGLE/Context.h"
Jamie Madilldf68a6f2017-01-13 17:29:53 -050015#include "libANGLE/Program.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040016#include "libANGLE/renderer/vulkan/BufferVk.h"
Jamie Madill49ac74b2017-12-21 14:42:33 -050017#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040018#include "libANGLE/renderer/vulkan/CompilerVk.h"
19#include "libANGLE/renderer/vulkan/ContextVk.h"
20#include "libANGLE/renderer/vulkan/DeviceVk.h"
21#include "libANGLE/renderer/vulkan/FenceNVVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040022#include "libANGLE/renderer/vulkan/FramebufferVk.h"
23#include "libANGLE/renderer/vulkan/ImageVk.h"
Yunchao Hea336b902017-08-02 16:05:21 +080024#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040025#include "libANGLE/renderer/vulkan/ProgramVk.h"
26#include "libANGLE/renderer/vulkan/QueryVk.h"
27#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
28#include "libANGLE/renderer/vulkan/RendererVk.h"
29#include "libANGLE/renderer/vulkan/SamplerVk.h"
30#include "libANGLE/renderer/vulkan/ShaderVk.h"
Jamie Madill70b5bb02017-08-28 13:32:37 -040031#include "libANGLE/renderer/vulkan/SyncVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040032#include "libANGLE/renderer/vulkan/TextureVk.h"
33#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
34#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050035#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040036
37namespace rx
38{
39
Jamie Madilld03a8492017-10-03 15:46:06 -040040namespace
41{
42
43VkIndexType GetVkIndexType(GLenum glIndexType)
44{
45 switch (glIndexType)
46 {
47 case GL_UNSIGNED_SHORT:
48 return VK_INDEX_TYPE_UINT16;
49 case GL_UNSIGNED_INT:
50 return VK_INDEX_TYPE_UINT32;
51 default:
52 UNREACHABLE();
53 return VK_INDEX_TYPE_MAX_ENUM;
54 }
55}
56
// Fixed indices into the VkDescriptorPoolSize array built in
// ContextVk::initialize(): one pool entry for uniform buffers and one for
// combined image samplers.
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool = 1,
};
62
Jamie Madilld03a8492017-10-03 15:46:06 -040063} // anonymous namespace
64
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      // 1 MB host-side staging buffer for streamed vertex data.
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, 1024 * 1024)
{
    // VkClearValue / VkClearDepthStencilValue are union types; memset
    // guarantees every byte (including padding) starts at zero.
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}
76
77ContextVk::~ContextVk()
78{
79}
80
Jamie Madill76e471e2017-10-21 09:56:01 -040081void ContextVk::onDestroy(const gl::Context *context)
82{
83 VkDevice device = mRenderer->getDevice();
84
85 mDescriptorPool.destroy(device);
Frank Henigman17448952017-01-05 15:48:26 -050086 mStreamingVertexData.destroy(device);
Jamie Madill76e471e2017-10-21 09:56:01 -040087}
88
Jamie Madill9e54b5a2016-05-25 12:57:39 -040089gl::Error ContextVk::initialize()
90{
Jamie Madill76e471e2017-10-21 09:56:01 -040091 VkDevice device = mRenderer->getDevice();
92
93 VkDescriptorPoolSize poolSizes[2];
94 poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
95 poolSizes[UniformBufferPool].descriptorCount = 1024;
96 poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
97 poolSizes[TexturePool].descriptorCount = 1024;
98
99 VkDescriptorPoolCreateInfo descriptorPoolInfo;
100 descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
101 descriptorPoolInfo.pNext = nullptr;
102 descriptorPoolInfo.flags = 0;
103
104 // TODO(jmadill): Pick non-arbitrary max.
105 descriptorPoolInfo.maxSets = 2048;
106
107 // Reserve pools for uniform blocks and textures.
108 descriptorPoolInfo.poolSizeCount = 2;
109 descriptorPoolInfo.pPoolSizes = poolSizes;
110
111 ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));
112
Jamie Madillf2f6d372018-01-10 21:37:23 -0500113 mPipelineDesc.reset(new vk::PipelineDesc());
114 mPipelineDesc->initDefaults();
115
Jamie Madille09bd5d2016-11-29 16:20:35 -0500116 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400117}
118
// glFlush is not yet supported by the Vulkan back-end.
gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}
125
// glFinish: delegates to the renderer, which performs the wait.
gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}
130
Jamie Madill4928b7c2017-06-20 12:57:39 -0400131gl::Error ContextVk::initPipeline(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400132{
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500133 ASSERT(!mCurrentPipeline);
Jamie Madill72106562017-03-24 14:18:50 -0400134
Jamie Madillf2f6d372018-01-10 21:37:23 -0500135 const gl::State &state = mState.getState();
136 VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
137 FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
138 ProgramVk *programVk = vk::GetImpl(state.getProgram());
Luc Ferronceb71902018-02-05 15:18:47 -0500139 const gl::AttributesMask activeAttribLocationsMask =
140 state.getProgram()->getActiveAttribLocationsMask();
Jamie Madillf2f6d372018-01-10 21:37:23 -0500141
142 // Ensure the topology of the pipeline description is updated.
143 mPipelineDesc->updateTopology(mCurrentDrawMode);
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500144
Jamie Madill112a3a82018-01-23 13:04:06 -0500145 // Copy over the latest attrib and binding descriptions.
146 vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500147
Jamie Madillf2f6d372018-01-10 21:37:23 -0500148 // Ensure that the RenderPass description is updated.
149 mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500150
151 // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
Luc Ferronceb71902018-02-05 15:18:47 -0500152 ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
153 &mCurrentPipeline));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500154
Jamie Madill72106562017-03-24 14:18:50 -0400155 return gl::NoError();
156}
157
// Shared pre-draw path for drawArrays/drawElements. Ensures a valid pipeline,
// picks (or starts) the render-pass command buffer for the draw framebuffer,
// resolves read dependencies for vertex buffers and textures, streams
// client-side vertex data, and binds pipeline, vertex buffers and descriptor
// sets. On success *commandBuffer is ready for the actual draw call.
//
// firstVertex/lastVertex bound the vertex range read by this draw (used for
// streaming client data); for indexed draws the range is not yet computed
// and callers pass 0, 0.
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               int firstVertex,
                               int lastVertex,
                               vk::CommandBuffer **commandBuffer)
{
    // A topology change requires a different pipeline object.
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    // TODO(jmadill): Need to link up the TextureVk to the Secondary CB.
    vk::CommandBufferNode *renderNode = nullptr;
    ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));

    // If no render-pass recording is open yet, start one; a fresh recording
    // means all previously-established dependencies must be re-declared.
    if (!renderNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(renderNode->startRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = renderNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
                                      queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(renderNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    // Stream client-memory vertex attributes into the staging buffer, then
    // bind the resulting (and any application-owned) vertex buffers.
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}
263
Jamie Madilld03a8492017-10-03 15:46:06 -0400264gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
265{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500266 vk::CommandBuffer *commandBuffer = nullptr;
Frank Henigman17448952017-01-05 15:48:26 -0500267 ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, &commandBuffer));
Jamie Madilld03a8492017-10-03 15:46:06 -0400268 commandBuffer->draw(count, 1, first, 0);
269 return gl::NoError();
270}
271
// Instanced non-indexed draws are not yet implemented in the Vulkan back-end.
gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
281
Jamie Madillc564c072017-06-01 12:45:42 -0400282gl::Error ContextVk::drawElements(const gl::Context *context,
283 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400284 GLsizei count,
285 GLenum type,
Qin Jiajia1da00652017-06-20 17:16:25 +0800286 const void *indices)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400287{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500288 vk::CommandBuffer *commandBuffer;
Frank Henigman17448952017-01-05 15:48:26 -0500289 // TODO(fjhenigman): calculate the index range and pass to setupDraw()
290 ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0, &commandBuffer));
Jamie Madilld03a8492017-10-03 15:46:06 -0400291
292 if (indices)
293 {
294 // TODO(jmadill): Buffer offsets and immediate data.
295 UNIMPLEMENTED();
296 return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
297 }
298
299 if (type == GL_UNSIGNED_BYTE)
300 {
301 // TODO(jmadill): Index translation.
302 UNIMPLEMENTED();
303 return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
304 }
305
Jamie Madilld03a8492017-10-03 15:46:06 -0400306 const gl::Buffer *elementArrayBuffer =
307 mState.getState().getVertexArray()->getElementArrayBuffer().get();
308 ASSERT(elementArrayBuffer);
309
Jamie Madille1f3ad42017-10-28 23:00:42 -0400310 BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
Jamie Madilld03a8492017-10-03 15:46:06 -0400311
312 commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
313 commandBuffer->drawIndexed(count, 1, 0, 0, 0);
314
315 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400316}
317
// Instanced indexed draws are not yet implemented in the Vulkan back-end.
gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
328
Jamie Madillc564c072017-06-01 12:45:42 -0400329gl::Error ContextVk::drawRangeElements(const gl::Context *context,
330 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400331 GLuint start,
332 GLuint end,
333 GLsizei count,
334 GLenum type,
Qin Jiajia1da00652017-06-20 17:16:25 +0800335 const void *indices)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400336{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500337 return gl::NoError();
338}
339
// The VkDevice is owned by the shared renderer, not by the context.
VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}
344
// Indirect draws are not yet implemented in the Vulkan back-end.
gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}
352
// Indirect indexed draws are not yet implemented in the Vulkan back-end.
gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}
362
// Device-loss detection is not yet implemented; always reports no reset.
GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}
368
// Vendor-string query is not yet implemented; returns an empty string.
std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}
374
// The renderer owns device/driver information and formats the description.
std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}
379
// Debug event markers are not yet implemented.
void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
384
// Debug group markers are not yet implemented.
void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
389
// Debug group markers are not yet implemented.
void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}
394
// KHR_debug groups are not yet implemented.
void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}
399
// KHR_debug groups are not yet implemented.
void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}
404
// Translates GL front-end dirty bits into updates of the cached Vulkan
// pipeline description (mPipelineDesc), the cached clear values, and the
// dirty flags consumed by setupDraw(). Any dirty bit invalidates the current
// pipeline object so the next draw re-resolves it. Bits whose handling is not
// yet implemented only emit a warning.
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                // With the test disabled, the scissor rect is forced to the
                // viewport so the whole viewport stays renderable.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            // Cull enable and cull face share one pipeline-desc update.
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            // Clear values are not pipeline state; they are cached on the
            // context for use at clear time.
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                // Defer to setupDraw(), which re-establishes buffer
                // dependencies for the newly bound VAO.
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                // A new executable changes the shader modules in the pipeline
                // and invalidates the texture bindings.
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        // Invalidate the program's texture descriptor set and have setupDraw()
        // re-establish texture read dependencies.
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}
650
// GPU disjoint-timer query support is not yet implemented; returns 0.
GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}
656
// GPU timestamp queries are not yet implemented; returns 0.
GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}
662
// No per-context work is required on MakeCurrent for the Vulkan back-end.
void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}
666
// Native capabilities are computed once by the shared renderer.
const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}
671
// Per-format texture capabilities come from the shared renderer.
const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}
676
// Supported extensions come from the shared renderer.
const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}
681
// Back-end limitations come from the shared renderer.
const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}
686
687CompilerImpl *ContextVk::createCompiler()
688{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400689 return new CompilerVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400690}
691
Jamie Madillacccc6c2016-05-03 17:22:10 -0400692ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400693{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400694 return new ShaderVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400695}
696
Jamie Madillacccc6c2016-05-03 17:22:10 -0400697ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400698{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400699 return new ProgramVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400700}
701
Jamie Madillacccc6c2016-05-03 17:22:10 -0400702FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400703{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500704 return FramebufferVk::CreateUserFBO(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400705}
706
707TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
708{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400709 return new TextureVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400710}
711
712RenderbufferImpl *ContextVk::createRenderbuffer()
713{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400714 return new RenderbufferVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400715}
716
Jamie Madill8f775602016-11-03 16:45:34 -0400717BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400718{
Jamie Madill8f775602016-11-03 16:45:34 -0400719 return new BufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400720}
721
Jamie Madillacccc6c2016-05-03 17:22:10 -0400722VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400723{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400724 return new VertexArrayVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400725}
726
727QueryImpl *ContextVk::createQuery(GLenum type)
728{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400729 return new QueryVk(type);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400730}
731
732FenceNVImpl *ContextVk::createFenceNV()
733{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400734 return new FenceNVVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400735}
736
Jamie Madill70b5bb02017-08-28 13:32:37 -0400737SyncImpl *ContextVk::createSync()
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400738{
Jamie Madill70b5bb02017-08-28 13:32:37 -0400739 return new SyncVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400740}
741
Geoff Lang73bd2182016-07-15 13:01:24 -0400742TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400743{
Geoff Lang73bd2182016-07-15 13:01:24 -0400744 return new TransformFeedbackVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400745}
746
Jamie Madill06ef36b2017-09-09 23:32:46 -0400747SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400748{
Jamie Madill06ef36b2017-09-09 23:32:46 -0400749 return new SamplerVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400750}
751
Yunchao Hea336b902017-08-02 16:05:21 +0800752ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
753{
754 return new ProgramPipelineVk(state);
755}
756
Sami Väisänene45e53b2016-05-25 10:36:04 +0300757std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
758{
759 return std::vector<PathImpl *>();
760}
761
void ContextVk::invalidateCurrentPipeline()
{
    // Drop the cached pipeline pointer so it is not reused on later draws.
    mCurrentPipeline = nullptr;
}
766
Jamie Madill49ac74b2017-12-21 14:42:33 -0500767void ContextVk::onVertexArrayChange()
768{
769 // TODO(jmadill): Does not handle dependent state changes.
770 mVertexArrayDirty = true;
771 invalidateCurrentPipeline();
772}
773
gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    // Compute dispatch is not implemented in the Vulkan backend yet.
    UNIMPLEMENTED();
    return gl::InternalError();
}
782
Qin Jiajia62fcf622017-11-30 16:16:12 +0800783gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
784{
785 UNIMPLEMENTED();
786 return gl::InternalError();
787}
788
gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    // glMemoryBarrier is not implemented in the Vulkan backend yet.
    UNIMPLEMENTED();
    return gl::InternalError();
}
794
gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    // glMemoryBarrierByRegion is not implemented in the Vulkan backend yet.
    UNIMPLEMENTED();
    return gl::InternalError();
}
800
vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    // Exposes the context-owned descriptor pool to other backend objects.
    return &mDescriptorPool;
}
805
const VkClearValue &ContextVk::getClearColorValue() const
{
    // Current clear color, cached in Vulkan's clear-value representation.
    return mClearColorValue;
}
810
const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    // Current clear depth/stencil values, cached in Vulkan's clear-value
    // representation.
    return mClearDepthStencilValue;
}
815
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400816} // namespace rx