blob: cf6a62f38fca0dda1b3a7335ca761fb1e93973f8 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ContextVk.cpp:
7// Implements the class methods for ContextVk.
8//
9
10#include "libANGLE/renderer/vulkan/ContextVk.h"
11
Jamie Madill20e005b2017-04-07 14:19:22 -040012#include "common/bitset_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040013#include "common/debug.h"
Jamie Madillbd159f02017-10-09 19:39:06 -040014#include "libANGLE/Context.h"
Jamie Madilldf68a6f2017-01-13 17:29:53 -050015#include "libANGLE/Program.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040016#include "libANGLE/renderer/vulkan/BufferVk.h"
Jamie Madill49ac74b2017-12-21 14:42:33 -050017#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040018#include "libANGLE/renderer/vulkan/CompilerVk.h"
19#include "libANGLE/renderer/vulkan/ContextVk.h"
20#include "libANGLE/renderer/vulkan/DeviceVk.h"
21#include "libANGLE/renderer/vulkan/FenceNVVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040022#include "libANGLE/renderer/vulkan/FramebufferVk.h"
23#include "libANGLE/renderer/vulkan/ImageVk.h"
Yunchao Hea336b902017-08-02 16:05:21 +080024#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040025#include "libANGLE/renderer/vulkan/ProgramVk.h"
26#include "libANGLE/renderer/vulkan/QueryVk.h"
27#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
28#include "libANGLE/renderer/vulkan/RendererVk.h"
29#include "libANGLE/renderer/vulkan/SamplerVk.h"
30#include "libANGLE/renderer/vulkan/ShaderVk.h"
Jamie Madill70b5bb02017-08-28 13:32:37 -040031#include "libANGLE/renderer/vulkan/SyncVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040032#include "libANGLE/renderer/vulkan/TextureVk.h"
33#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
34#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050035#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040036
37namespace rx
38{
39
Jamie Madilld03a8492017-10-03 15:46:06 -040040namespace
41{
42
43VkIndexType GetVkIndexType(GLenum glIndexType)
44{
45 switch (glIndexType)
46 {
47 case GL_UNSIGNED_SHORT:
48 return VK_INDEX_TYPE_UINT16;
49 case GL_UNSIGNED_INT:
50 return VK_INDEX_TYPE_UINT32;
51 default:
52 UNREACHABLE();
53 return VK_INDEX_TYPE_MAX_ENUM;
54 }
55}
56
// Indices into the VkDescriptorPoolSize array built in ContextVk::initialize:
// one pool slot for uniform buffers, one for combined image samplers.
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool       = 1,
};
62
Jamie Madilld03a8492017-10-03 15:46:06 -040063} // anonymous namespace
64
Jamie Madillacccc6c2016-05-03 17:22:10 -040065ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
Jamie Madill49ac74b2017-12-21 14:42:33 -050066 : ContextImpl(state),
67 mRenderer(renderer),
68 mCurrentDrawMode(GL_NONE),
69 mVertexArrayDirty(false),
70 mTexturesDirty(false)
Jamie Madill9e54b5a2016-05-25 12:57:39 -040071{
72}
73
74ContextVk::~ContextVk()
75{
76}
77
Jamie Madill76e471e2017-10-21 09:56:01 -040078void ContextVk::onDestroy(const gl::Context *context)
79{
80 VkDevice device = mRenderer->getDevice();
81
82 mDescriptorPool.destroy(device);
83}
84
Jamie Madill9e54b5a2016-05-25 12:57:39 -040085gl::Error ContextVk::initialize()
86{
Jamie Madill76e471e2017-10-21 09:56:01 -040087 VkDevice device = mRenderer->getDevice();
88
89 VkDescriptorPoolSize poolSizes[2];
90 poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
91 poolSizes[UniformBufferPool].descriptorCount = 1024;
92 poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
93 poolSizes[TexturePool].descriptorCount = 1024;
94
95 VkDescriptorPoolCreateInfo descriptorPoolInfo;
96 descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
97 descriptorPoolInfo.pNext = nullptr;
98 descriptorPoolInfo.flags = 0;
99
100 // TODO(jmadill): Pick non-arbitrary max.
101 descriptorPoolInfo.maxSets = 2048;
102
103 // Reserve pools for uniform blocks and textures.
104 descriptorPoolInfo.poolSizeCount = 2;
105 descriptorPoolInfo.pPoolSizes = poolSizes;
106
107 ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));
108
Jamie Madillf2f6d372018-01-10 21:37:23 -0500109 mPipelineDesc.reset(new vk::PipelineDesc());
110 mPipelineDesc->initDefaults();
111
Jamie Madille09bd5d2016-11-29 16:20:35 -0500112 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400113}
114
Jamie Madillafa02a22017-11-23 12:57:38 -0500115gl::Error ContextVk::flush(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400116{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500117 // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400118 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500119 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400120}
121
Jamie Madillafa02a22017-11-23 12:57:38 -0500122gl::Error ContextVk::finish(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400123{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500124 return mRenderer->finish(context);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400125}
126
Jamie Madill4928b7c2017-06-20 12:57:39 -0400127gl::Error ContextVk::initPipeline(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400128{
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500129 ASSERT(!mCurrentPipeline);
Jamie Madill72106562017-03-24 14:18:50 -0400130
Jamie Madillf2f6d372018-01-10 21:37:23 -0500131 const gl::State &state = mState.getState();
132 VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
133 FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
134 ProgramVk *programVk = vk::GetImpl(state.getProgram());
Luc Ferronceb71902018-02-05 15:18:47 -0500135 const gl::AttributesMask activeAttribLocationsMask =
136 state.getProgram()->getActiveAttribLocationsMask();
Jamie Madillf2f6d372018-01-10 21:37:23 -0500137
138 // Ensure the topology of the pipeline description is updated.
139 mPipelineDesc->updateTopology(mCurrentDrawMode);
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500140
Jamie Madill112a3a82018-01-23 13:04:06 -0500141 // Copy over the latest attrib and binding descriptions.
142 vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500143
Jamie Madillf2f6d372018-01-10 21:37:23 -0500144 // Ensure that the RenderPass description is updated.
145 mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500146
147 // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
Luc Ferronceb71902018-02-05 15:18:47 -0500148 ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
149 &mCurrentPipeline));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500150
Jamie Madill72106562017-03-24 14:18:50 -0400151 return gl::NoError();
152}
153
Jamie Madill49ac74b2017-12-21 14:42:33 -0500154gl::Error ContextVk::setupDraw(const gl::Context *context,
155 GLenum mode,
156 DrawType drawType,
157 vk::CommandBuffer **commandBuffer)
Jamie Madill72106562017-03-24 14:18:50 -0400158{
159 if (mode != mCurrentDrawMode)
160 {
161 invalidateCurrentPipeline();
162 mCurrentDrawMode = mode;
163 }
164
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500165 if (!mCurrentPipeline)
Jamie Madill72106562017-03-24 14:18:50 -0400166 {
Jamie Madill4928b7c2017-06-20 12:57:39 -0400167 ANGLE_TRY(initPipeline(context));
Jamie Madill72106562017-03-24 14:18:50 -0400168 }
169
Jamie Madill72106562017-03-24 14:18:50 -0400170 const auto &state = mState.getState();
Jamie Madillacf2f3a2017-11-21 19:22:44 -0500171 const gl::Program *programGL = state.getProgram();
Jamie Madille1f3ad42017-10-28 23:00:42 -0400172 ProgramVk *programVk = vk::GetImpl(programGL);
Jamie Madillacf2f3a2017-11-21 19:22:44 -0500173 const gl::VertexArray *vao = state.getVertexArray();
Jamie Madille1f3ad42017-10-28 23:00:42 -0400174 VertexArrayVk *vkVAO = vk::GetImpl(vao);
Jamie Madill72106562017-03-24 14:18:50 -0400175 const auto *drawFBO = state.getDrawFramebuffer();
Jamie Madille1f3ad42017-10-28 23:00:42 -0400176 FramebufferVk *vkFBO = vk::GetImpl(drawFBO);
Jamie Madill72106562017-03-24 14:18:50 -0400177 Serial queueSerial = mRenderer->getCurrentQueueSerial();
Jamie Madillbd159f02017-10-09 19:39:06 -0400178 uint32_t maxAttrib = programGL->getState().getMaxActiveAttribLocation();
Jamie Madill72106562017-03-24 14:18:50 -0400179
Jamie Madillbd159f02017-10-09 19:39:06 -0400180 // Process vertex attributes. Assume zero offsets for now.
181 // TODO(jmadill): Offset handling.
Jamie Madillda854a22017-11-30 17:24:21 -0500182 const auto &vertexHandles = vkVAO->getCurrentArrayBufferHandles();
183 angle::MemoryBuffer *zeroBuf = nullptr;
Jamie Madillbd159f02017-10-09 19:39:06 -0400184 ANGLE_TRY(context->getZeroFilledBuffer(maxAttrib * sizeof(VkDeviceSize), &zeroBuf));
Jamie Madill72106562017-03-24 14:18:50 -0400185
Jamie Madill49ac74b2017-12-21 14:42:33 -0500186 // TODO(jmadill): Need to link up the TextureVk to the Secondary CB.
187 vk::CommandBufferNode *renderNode = nullptr;
188 ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500189
Jamie Madill49ac74b2017-12-21 14:42:33 -0500190 if (!renderNode->getInsideRenderPassCommands()->valid())
191 {
192 mVertexArrayDirty = true;
193 mTexturesDirty = true;
194 ANGLE_TRY(renderNode->startRenderPassRecording(mRenderer, commandBuffer));
195 }
196 else
197 {
198 *commandBuffer = renderNode->getInsideRenderPassCommands();
199 }
Jamie Madillbd159f02017-10-09 19:39:06 -0400200
Jamie Madill49ac74b2017-12-21 14:42:33 -0500201 // Ensure any writes to the VAO buffers are flushed before we read from them.
202 if (mVertexArrayDirty)
203 {
204 mVertexArrayDirty = false;
205 vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
206 queueSerial, drawType);
207 }
208
209 // Ensure any writes to the textures are flushed before we read from them.
210 if (mTexturesDirty)
211 {
212 mTexturesDirty = false;
213 // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
214 const auto &completeTextures = state.getCompleteTextureCache();
215 for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
216 {
217 ASSERT(!samplerBinding.unreferenced);
218
219 // TODO(jmadill): Sampler arrays
220 ASSERT(samplerBinding.boundTextureUnits.size() == 1);
221
222 GLuint textureUnit = samplerBinding.boundTextureUnits[0];
223 const gl::Texture *texture = completeTextures[textureUnit];
224
225 // TODO(jmadill): Incomplete textures handling.
226 ASSERT(texture);
227
228 TextureVk *textureVk = vk::GetImpl(texture);
Jamie Madillefb5a5c2018-01-29 15:56:59 -0500229 textureVk->setReadNode(renderNode, mRenderer->getCurrentQueueSerial());
Jamie Madill49ac74b2017-12-21 14:42:33 -0500230 }
231 }
232
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500233 (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
Jamie Madill49ac74b2017-12-21 14:42:33 -0500234 (*commandBuffer)
235 ->bindVertexBuffers(0, maxAttrib, vertexHandles.data(),
236 reinterpret_cast<const VkDeviceSize *>(zeroBuf->data()));
237
238 // Update the queue serial for the pipeline object.
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500239 ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
240 mCurrentPipeline->updateSerial(queueSerial);
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500241
Jamie Madill76e471e2017-10-21 09:56:01 -0400242 // TODO(jmadill): Can probably use more dirty bits here.
Jamie Madill49ac74b2017-12-21 14:42:33 -0500243 ANGLE_TRY(programVk->updateUniforms(this));
244 programVk->updateTexturesDescriptorSet(this);
Jamie Madill76e471e2017-10-21 09:56:01 -0400245
246 // Bind the graphics descriptor sets.
247 // TODO(jmadill): Handle multiple command buffers.
Jamie Madill5547b382017-10-23 18:16:01 -0400248 const auto &descriptorSets = programVk->getDescriptorSets();
Jamie Madill8c3988c2017-12-21 14:44:56 -0500249 const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
250 if (!usedRange.empty())
Jamie Madill76e471e2017-10-21 09:56:01 -0400251 {
Jamie Madill8c3988c2017-12-21 14:44:56 -0500252 ASSERT(!descriptorSets.empty());
253 const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
Jamie Madill49ac74b2017-12-21 14:42:33 -0500254 (*commandBuffer)
Jamie Madill8c3988c2017-12-21 14:44:56 -0500255 ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
256 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
Jamie Madill76e471e2017-10-21 09:56:01 -0400257 }
258
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500259 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400260}
261
Jamie Madilld03a8492017-10-03 15:46:06 -0400262gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
263{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500264 vk::CommandBuffer *commandBuffer = nullptr;
265 ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, &commandBuffer));
Jamie Madilld03a8492017-10-03 15:46:06 -0400266 commandBuffer->draw(count, 1, first, 0);
267 return gl::NoError();
268}
269
Jamie Madillc564c072017-06-01 12:45:42 -0400270gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
271 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400272 GLint first,
273 GLsizei count,
274 GLsizei instanceCount)
275{
276 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500277 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400278}
279
Jamie Madillc564c072017-06-01 12:45:42 -0400280gl::Error ContextVk::drawElements(const gl::Context *context,
281 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400282 GLsizei count,
283 GLenum type,
Qin Jiajia1da00652017-06-20 17:16:25 +0800284 const void *indices)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400285{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500286 vk::CommandBuffer *commandBuffer;
287 ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, &commandBuffer));
Jamie Madilld03a8492017-10-03 15:46:06 -0400288
289 if (indices)
290 {
291 // TODO(jmadill): Buffer offsets and immediate data.
292 UNIMPLEMENTED();
293 return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
294 }
295
296 if (type == GL_UNSIGNED_BYTE)
297 {
298 // TODO(jmadill): Index translation.
299 UNIMPLEMENTED();
300 return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
301 }
302
Jamie Madilld03a8492017-10-03 15:46:06 -0400303 const gl::Buffer *elementArrayBuffer =
304 mState.getState().getVertexArray()->getElementArrayBuffer().get();
305 ASSERT(elementArrayBuffer);
306
Jamie Madille1f3ad42017-10-28 23:00:42 -0400307 BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
Jamie Madilld03a8492017-10-03 15:46:06 -0400308
309 commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
310 commandBuffer->drawIndexed(count, 1, 0, 0, 0);
311
312 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400313}
314
Jamie Madillc564c072017-06-01 12:45:42 -0400315gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
316 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400317 GLsizei count,
318 GLenum type,
Jamie Madill876429b2017-04-20 15:46:24 -0400319 const void *indices,
Qin Jiajia1da00652017-06-20 17:16:25 +0800320 GLsizei instances)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400321{
322 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500323 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400324}
325
Jamie Madillc564c072017-06-01 12:45:42 -0400326gl::Error ContextVk::drawRangeElements(const gl::Context *context,
327 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400328 GLuint start,
329 GLuint end,
330 GLsizei count,
331 GLenum type,
Qin Jiajia1da00652017-06-20 17:16:25 +0800332 const void *indices)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400333{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500334 return gl::NoError();
335}
336
337VkDevice ContextVk::getDevice() const
338{
339 return mRenderer->getDevice();
340}
341
Jamie Madillc564c072017-06-01 12:45:42 -0400342gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
343 GLenum mode,
344 const void *indirect)
Jiajia Qind9671222016-11-29 16:30:31 +0800345{
346 UNIMPLEMENTED();
347 return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
348}
349
Jamie Madillc564c072017-06-01 12:45:42 -0400350gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
351 GLenum mode,
352 GLenum type,
353 const void *indirect)
Jiajia Qind9671222016-11-29 16:30:31 +0800354{
355 UNIMPLEMENTED();
356 return gl::InternalError()
357 << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
358}
359
Corentin Wallez87fbe1c2016-08-03 14:41:42 -0400360GLenum ContextVk::getResetStatus()
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400361{
362 UNIMPLEMENTED();
Corentin Wallez87fbe1c2016-08-03 14:41:42 -0400363 return GL_NO_ERROR;
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400364}
365
366std::string ContextVk::getVendorString() const
367{
368 UNIMPLEMENTED();
369 return std::string();
370}
371
372std::string ContextVk::getRendererDescription() const
373{
Jamie Madille09bd5d2016-11-29 16:20:35 -0500374 return mRenderer->getRendererDescription();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400375}
376
377void ContextVk::insertEventMarker(GLsizei length, const char *marker)
378{
379 UNIMPLEMENTED();
380}
381
382void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
383{
384 UNIMPLEMENTED();
385}
386
387void ContextVk::popGroupMarker()
388{
389 UNIMPLEMENTED();
390}
391
Geoff Lang5d5253a2017-11-22 14:51:12 -0500392void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
393{
394 UNIMPLEMENTED();
395}
396
397void ContextVk::popDebugGroup()
398{
399 UNIMPLEMENTED();
400}
401
Jamie Madillfe548342017-06-19 11:13:24 -0400402void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400403{
Jamie Madill72106562017-03-24 14:18:50 -0400404 if (dirtyBits.any())
405 {
406 invalidateCurrentPipeline();
407 }
Jamie Madillebf72992017-10-13 14:09:45 -0400408
409 const auto &glState = context->getGLState();
410
411 // TODO(jmadill): Full dirty bits implementation.
Jamie Madill5547b382017-10-23 18:16:01 -0400412 bool dirtyTextures = false;
Jamie Madillebf72992017-10-13 14:09:45 -0400413
414 for (auto dirtyBit : dirtyBits)
415 {
416 switch (dirtyBit)
417 {
418 case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
Luc Ferron00155d52018-02-06 10:48:47 -0500419 if (glState.isScissorTestEnabled())
420 {
421 mPipelineDesc->updateScissor(glState.getScissor());
422 }
423 else
424 {
425 mPipelineDesc->updateScissor(glState.getViewport());
426 }
Jamie Madillebf72992017-10-13 14:09:45 -0400427 break;
428 case gl::State::DIRTY_BIT_SCISSOR:
Luc Ferron00155d52018-02-06 10:48:47 -0500429 // Only modify the scissor region if the test is enabled, otherwise we want to keep
430 // the viewport size as the scissor region.
431 if (glState.isScissorTestEnabled())
432 {
433 mPipelineDesc->updateScissor(glState.getScissor());
434 }
Jamie Madillebf72992017-10-13 14:09:45 -0400435 break;
436 case gl::State::DIRTY_BIT_VIEWPORT:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500437 mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
438 glState.getFarPlane());
Luc Ferron00155d52018-02-06 10:48:47 -0500439
440 // If the scissor test isn't enabled, we have to also update the scissor to
441 // be equal to the viewport to make sure we keep rendering everything in the
442 // viewport.
443 if (!glState.isScissorTestEnabled())
444 {
445 mPipelineDesc->updateScissor(glState.getViewport());
446 }
Jamie Madillebf72992017-10-13 14:09:45 -0400447 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400448 case gl::State::DIRTY_BIT_DEPTH_RANGE:
449 WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
450 break;
451 case gl::State::DIRTY_BIT_BLEND_ENABLED:
452 WARN() << "DIRTY_BIT_BLEND_ENABLED unimplemented";
453 break;
454 case gl::State::DIRTY_BIT_BLEND_COLOR:
455 WARN() << "DIRTY_BIT_BLEND_COLOR unimplemented";
456 break;
457 case gl::State::DIRTY_BIT_BLEND_FUNCS:
458 WARN() << "DIRTY_BIT_BLEND_FUNCS unimplemented";
459 break;
460 case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
461 WARN() << "DIRTY_BIT_BLEND_EQUATIONS unimplemented";
462 break;
463 case gl::State::DIRTY_BIT_COLOR_MASK:
464 WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
465 break;
466 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
467 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
468 break;
469 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
470 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
471 break;
472 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
473 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
474 break;
475 case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
476 WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
477 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400478 case gl::State::DIRTY_BIT_SAMPLE_MASK:
479 WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400480 break;
481 case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
482 WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
483 break;
484 case gl::State::DIRTY_BIT_DEPTH_FUNC:
485 WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
486 break;
487 case gl::State::DIRTY_BIT_DEPTH_MASK:
488 WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
489 break;
490 case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
491 WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
492 break;
493 case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
494 WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
495 break;
496 case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
497 WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
498 break;
499 case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
500 WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
501 break;
502 case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
503 WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
504 break;
505 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
506 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
507 break;
508 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
509 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
510 break;
511 case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
512 case gl::State::DIRTY_BIT_CULL_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500513 mPipelineDesc->updateCullMode(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400514 break;
515 case gl::State::DIRTY_BIT_FRONT_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500516 mPipelineDesc->updateFrontFace(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400517 break;
518 case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
519 WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
520 break;
521 case gl::State::DIRTY_BIT_POLYGON_OFFSET:
522 WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
523 break;
524 case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
525 WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
526 break;
527 case gl::State::DIRTY_BIT_LINE_WIDTH:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500528 mPipelineDesc->updateLineWidth(glState.getLineWidth());
Jamie Madillebf72992017-10-13 14:09:45 -0400529 break;
530 case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
531 WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
532 break;
533 case gl::State::DIRTY_BIT_CLEAR_COLOR:
534 WARN() << "DIRTY_BIT_CLEAR_COLOR unimplemented";
535 break;
536 case gl::State::DIRTY_BIT_CLEAR_DEPTH:
537 WARN() << "DIRTY_BIT_CLEAR_DEPTH unimplemented";
538 break;
539 case gl::State::DIRTY_BIT_CLEAR_STENCIL:
540 WARN() << "DIRTY_BIT_CLEAR_STENCIL unimplemented";
541 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400542 case gl::State::DIRTY_BIT_UNPACK_STATE:
543 WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400544 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500545 case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
546 WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
547 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400548 case gl::State::DIRTY_BIT_PACK_STATE:
549 WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400550 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500551 case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
552 WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
553 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400554 case gl::State::DIRTY_BIT_DITHER_ENABLED:
555 WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
556 break;
557 case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
558 WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
559 break;
560 case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
561 WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
562 break;
563 case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
564 WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
565 break;
566 case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
567 WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
568 break;
569 case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
570 WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
571 break;
572 case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
Jamie Madill49ac74b2017-12-21 14:42:33 -0500573 mVertexArrayDirty = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400574 break;
575 case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
576 WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
577 break;
Qin Jiajiaa98a2812017-11-30 18:12:06 +0800578 case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
579 WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
580 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400581 case gl::State::DIRTY_BIT_PROGRAM_BINDING:
582 WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
583 break;
584 case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
585 {
Jamie Madillf2f6d372018-01-10 21:37:23 -0500586 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
587 mPipelineDesc->updateShaders(programVk);
Jamie Madill5547b382017-10-23 18:16:01 -0400588 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400589 break;
590 }
591 case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400592 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400593 break;
594 case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400595 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400596 break;
Geoff Langded79232017-11-28 15:21:11 -0500597 case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
598 WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
599 break;
Xinghua Cao10a4d432017-11-28 14:46:26 +0800600 case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
601 WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
602 break;
Jamie Madillf4141212017-12-12 15:08:07 -0500603 case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
604 WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
605 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400606 case gl::State::DIRTY_BIT_MULTISAMPLING:
607 WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
608 break;
609 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
610 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
611 break;
612 case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
613 WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
614 break;
615 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
616 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
617 break;
618 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
619 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
620 break;
621 case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
622 WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
623 break;
624 case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
625 WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
626 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400627 case gl::State::DIRTY_BIT_CURRENT_VALUES:
628 WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
629 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400630 default:
Jamie Madillc67323a2017-11-02 23:11:41 -0400631 UNREACHABLE();
Jamie Madillebf72992017-10-13 14:09:45 -0400632 break;
633 }
634 }
Jamie Madill5547b382017-10-23 18:16:01 -0400635
636 if (dirtyTextures)
637 {
Jamie Madille1f3ad42017-10-28 23:00:42 -0400638 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
Jamie Madill5547b382017-10-23 18:16:01 -0400639 programVk->invalidateTextures();
Jamie Madill49ac74b2017-12-21 14:42:33 -0500640 mTexturesDirty = true;
Jamie Madill5547b382017-10-23 18:16:01 -0400641 }
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400642}
643
644GLint ContextVk::getGPUDisjoint()
645{
646 UNIMPLEMENTED();
647 return GLint();
648}
649
650GLint64 ContextVk::getTimestamp()
651{
652 UNIMPLEMENTED();
653 return GLint64();
654}
655
Jamie Madill4928b7c2017-06-20 12:57:39 -0400656void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400657{
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400658}
659
660const gl::Caps &ContextVk::getNativeCaps() const
661{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400662 return mRenderer->getNativeCaps();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400663}
664
665const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
666{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400667 return mRenderer->getNativeTextureCaps();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400668}
669
670const gl::Extensions &ContextVk::getNativeExtensions() const
671{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400672 return mRenderer->getNativeExtensions();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400673}
674
675const gl::Limitations &ContextVk::getNativeLimitations() const
676{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400677 return mRenderer->getNativeLimitations();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400678}
679
680CompilerImpl *ContextVk::createCompiler()
681{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400682 return new CompilerVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400683}
684
Jamie Madillacccc6c2016-05-03 17:22:10 -0400685ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400686{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400687 return new ShaderVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400688}
689
Jamie Madillacccc6c2016-05-03 17:22:10 -0400690ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400691{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400692 return new ProgramVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400693}
694
Jamie Madillacccc6c2016-05-03 17:22:10 -0400695FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400696{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500697 return FramebufferVk::CreateUserFBO(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400698}
699
700TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
701{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400702 return new TextureVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400703}
704
705RenderbufferImpl *ContextVk::createRenderbuffer()
706{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400707 return new RenderbufferVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400708}
709
Jamie Madill8f775602016-11-03 16:45:34 -0400710BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400711{
Jamie Madill8f775602016-11-03 16:45:34 -0400712 return new BufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400713}
714
Jamie Madillacccc6c2016-05-03 17:22:10 -0400715VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400716{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400717 return new VertexArrayVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400718}
719
720QueryImpl *ContextVk::createQuery(GLenum type)
721{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400722 return new QueryVk(type);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400723}
724
725FenceNVImpl *ContextVk::createFenceNV()
726{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400727 return new FenceNVVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400728}
729
Jamie Madill70b5bb02017-08-28 13:32:37 -0400730SyncImpl *ContextVk::createSync()
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400731{
Jamie Madill70b5bb02017-08-28 13:32:37 -0400732 return new SyncVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400733}
734
Geoff Lang73bd2182016-07-15 13:01:24 -0400735TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400736{
Geoff Lang73bd2182016-07-15 13:01:24 -0400737 return new TransformFeedbackVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400738}
739
Jamie Madill06ef36b2017-09-09 23:32:46 -0400740SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400741{
Jamie Madill06ef36b2017-09-09 23:32:46 -0400742 return new SamplerVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400743}
744
Yunchao Hea336b902017-08-02 16:05:21 +0800745ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
746{
747 return new ProgramPipelineVk(state);
748}
749
Sami Väisänene45e53b2016-05-25 10:36:04 +0300750std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
751{
752 return std::vector<PathImpl *>();
753}
754
Jamie Madill72106562017-03-24 14:18:50 -0400755void ContextVk::invalidateCurrentPipeline()
756{
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500757 mCurrentPipeline = nullptr;
Jamie Madill72106562017-03-24 14:18:50 -0400758}
759
Jamie Madill49ac74b2017-12-21 14:42:33 -0500760void ContextVk::onVertexArrayChange()
761{
762 // TODO(jmadill): Does not handle dependent state changes.
763 mVertexArrayDirty = true;
764 invalidateCurrentPipeline();
765}
766
Jamie Madillfe548342017-06-19 11:13:24 -0400767gl::Error ContextVk::dispatchCompute(const gl::Context *context,
768 GLuint numGroupsX,
769 GLuint numGroupsY,
770 GLuint numGroupsZ)
Xinghua Cao2b396592017-03-29 15:36:04 +0800771{
772 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500773 return gl::InternalError();
Xinghua Cao2b396592017-03-29 15:36:04 +0800774}
775
Qin Jiajia62fcf622017-11-30 16:16:12 +0800776gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
777{
778 UNIMPLEMENTED();
779 return gl::InternalError();
780}
781
Xinghua Cao89c422a2017-11-29 18:24:20 +0800782gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
783{
784 UNIMPLEMENTED();
785 return gl::InternalError();
786}
787
788gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
789{
790 UNIMPLEMENTED();
791 return gl::InternalError();
792}
793
Jamie Madill76e471e2017-10-21 09:56:01 -0400794vk::DescriptorPool *ContextVk::getDescriptorPool()
795{
796 return &mDescriptorPool;
797}
798
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400799} // namespace rx