//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{

namespace
{

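// Maps a GL index type to the corresponding Vulkan index type. Note that core Vulkan only
// exposes 16-bit and 32-bit index buffers, so GL_UNSIGNED_BYTE has no direct mapping here;
// drawElements() below rejects unsigned-byte indices until an index translation path exists.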
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool       = 1,
};

}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
}

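// initialize() sets up the per-context Vulkan objects: a descriptor pool sized for uniform
// buffers and combined image samplers (see DescriptorPoolIndex above), and a default-initialized
// pipeline description that is later patched by syncState() and initPipeline(). The pool and set
// counts are placeholder values (see the TODO below).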
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type                  = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount       = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes    = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state       = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk         = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

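// setupDraw() performs the state work shared by the draw* entry points:
//   1) (Re)create the pipeline if the draw mode changed or the pipeline was invalidated.
//   2) Fetch the framebuffer's command graph node and begin render pass recording if needed.
//   3) Flush vertex-array and texture dependencies when their dirty flags are set.
//   4) Bind the pipeline, vertex buffers and any used descriptor sets on the command buffer.
// Vertex attribute offsets are currently assumed to be zero (see the TODO below).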
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    // Process vertex attributes. Assume zero offsets for now.
    // TODO(jmadill): Offset handling.
    const auto &vertexHandles    = vkVAO->getCurrentArrayBufferHandles();
    angle::MemoryBuffer *zeroBuf = nullptr;
    ANGLE_TRY(context->getZeroFilledBuffer(maxAttrib * sizeof(VkDeviceSize), &zeroBuf));

    // TODO(jmadill): Need to link up the TextureVk to the Secondary CB.
    vk::CommandBufferNode *renderNode = nullptr;
    ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));

    if (!renderNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(renderNode->startRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = renderNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
                                      queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(renderNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vertexHandles.data(),
                            reinterpret_cast<const VkDeviceSize *>(zeroBuf->data()));

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, &commandBuffer));
    commandBuffer->draw(count, 1, first, 0);
    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    vk::CommandBuffer *commandBuffer;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, &commandBuffer));

    if (indices)
    {
        // TODO(jmadill): Buffer offsets and immediate data.
        UNIMPLEMENTED();
        return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
    }

    if (type == GL_UNSIGNED_BYTE)
    {
        // TODO(jmadill): Index translation.
        UNIMPLEMENTED();
        return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
    }

    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

    commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
    commandBuffer->drawIndexed(count, 1, 0, 0, 0);

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

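// syncState() translates front-end dirty bits into updates on the cached pipeline description
// and clear values. Any dirty bit currently invalidates the compiled pipeline, so the next draw
// rebuilds it via initPipeline(). Many state bits are still unimplemented and only log a warning.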
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

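// Factory methods: each returns the Vulkan-backed implementation object for the corresponding
// front-end GL object type.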
CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer()
{
    return new RenderbufferVk();
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

}  // namespace rx