//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/formatutilsvk.h"

namespace rx
{

namespace
{

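// Maps a GL index element type to the corresponding Vulkan index type. Vulkan has no index type
// for GL_UNSIGNED_BYTE, so byte indices are rejected in drawElements() until index translation
// is implemented.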
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

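// Indices into the poolSizes array used when creating the descriptor pool in
// ContextVk::initialize(): one entry for uniform buffer descriptors and one for combined
// image sampler descriptors.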
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool       = 1,
};

}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false)
{
}

ContextVk::~ContextVk()
{
    invalidateCurrentPipeline();
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
}

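// Creates the context's descriptor pool (exposed via getDescriptorPool()), reserving uniform
// buffer and combined image sampler descriptors, and sets up the default pipeline description.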
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type                  = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount       = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes    = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

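// Builds a new graphics pipeline from the current GL state: primitive topology, vertex input
// bindings from the vertex array, and the render pass description from the draw framebuffer.
// Called lazily from setupDraw() when no valid pipeline is bound.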
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline.valid());

    const gl::State &state       = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk         = vk::GetImpl(state.getProgram());

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Ensure the attribs and bindings are updated.
    vertexArrayVk->updateVertexDescriptions(context, mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mPipelineDesc->initializePipeline(mRenderer, programVk, &mCurrentPipeline));

    return gl::NoError();
}

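// Shared per-draw setup used by drawArrays and drawElements: rebuilds the pipeline if the draw
// mode changed, starts or reuses the framebuffer's render pass command buffer, records vertex
// array and texture dependencies on the render node, updates uniforms and descriptor sets, and
// binds the pipeline, vertex buffers and descriptor sets. Returns the command buffer the caller
// records the actual draw call into.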
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline.valid())
    {
        ANGLE_TRY(initPipeline(context));
        ASSERT(mCurrentPipeline.valid());
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    // Process vertex attributes. Assume zero offsets for now.
    // TODO(jmadill): Offset handling.
    const auto &vertexHandles    = vkVAO->getCurrentArrayBufferHandles();
    angle::MemoryBuffer *zeroBuf = nullptr;
    ANGLE_TRY(context->getZeroFilledBuffer(maxAttrib * sizeof(VkDeviceSize), &zeroBuf));

    // TODO(jmadill): Need to link up the TextureVk to the Secondary CB.
    vk::CommandBufferNode *renderNode = nullptr;
    ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));

    if (!renderNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(renderNode->startRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = renderNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
                                      queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->updateDependencies(renderNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline);
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vertexHandles.data(),
                            reinterpret_cast<const VkDeviceSize *>(zeroBuf->data()));

    // Update the queue serial for the pipeline object.
    // TODO(jmadill): the queue serial should be bound to the pipeline.
    updateQueueSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, &commandBuffer));
    commandBuffer->draw(count, 1, first, 0);
    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

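// Indexed draws are limited for now: an element array buffer must be bound and is always read
// from offset zero, and GL_UNSIGNED_BYTE indices are rejected until index translation is
// implemented.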
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    vk::CommandBuffer *commandBuffer;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, &commandBuffer));

    if (indices)
    {
        // TODO(jmadill): Buffer offsets and immediate data.
        UNIMPLEMENTED();
        return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
    }

    if (type == GL_UNSIGNED_BYTE)
    {
        // TODO(jmadill): Index translation.
        UNIMPLEMENTED();
        return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
    }

    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

    commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
    commandBuffer->drawIndexed(count, 1, 0, 0, 0);

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

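// Translates GL state-change dirty bits into updates on the pipeline description and per-draw
// dirty flags. Any dirty bit also invalidates the current pipeline; most bits only emit a
// warning until their handling is implemented.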
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                WARN() << "DIRTY_BIT_SCISSOR_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                WARN() << "DIRTY_BIT_SCISSOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                WARN() << "DIRTY_BIT_BLEND_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                WARN() << "DIRTY_BIT_BLEND_COLOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                WARN() << "DIRTY_BIT_BLEND_FUNCS unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                WARN() << "DIRTY_BIT_BLEND_EQUATIONS unimplemented";
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                WARN() << "DIRTY_BIT_CLEAR_COLOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                WARN() << "DIRTY_BIT_CLEAR_DEPTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                WARN() << "DIRTY_BIT_CLEAR_STENCIL unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
            {
                VertexArrayVk *vertexArrayVk = vk::GetImpl(glState.getVertexArray());
                vertexArrayVk->invalidateVertexDescriptions();
                mVertexArrayDirty = true;
                break;
            }
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);

                // Also invalidate the vertex descriptions cache in the Vertex Array.
                VertexArrayVk *vertexArrayVk = vk::GetImpl(glState.getVertexArray());
                vertexArrayVk->invalidateVertexDescriptions();

                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer()
{
    return new RenderbufferVk();
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

// TODO(jmadill): Use pipeline cache.
void ContextVk::invalidateCurrentPipeline()
{
    mRenderer->releaseResource(*this, &mCurrentPipeline);
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

}  // namespace rx