//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/Surface.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"

namespace rx
{

namespace
{
GLenum DefaultGLErrorCode(VkResult result)
{
    switch (result)
    {
        case VK_ERROR_OUT_OF_HOST_MEMORY:
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
        case VK_ERROR_TOO_MANY_OBJECTS:
            return GL_OUT_OF_MEMORY;
        default:
            return GL_INVALID_OPERATION;
    }
}

constexpr gl::Rectangle kMaxSizedScissor(0,
                                         0,
                                         std::numeric_limits<int>::max(),
                                         std::numeric_limits<int>::max());

constexpr VkColorComponentFlags kAllColorChannelsMask =
    (VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT |
     VK_COLOR_COMPONENT_A_BIT);

constexpr VkBufferUsageFlags kVertexBufferUsage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
constexpr size_t kDefaultValueSize = sizeof(float) * 4;
constexpr size_t kDefaultBufferSize = kDefaultValueSize * 16;
}  // anonymous namespace

// std::array only uses aggregate init. Thus we make a helper macro to reduce code duplication.
#define INIT                                   \
    {                                          \
        kVertexBufferUsage, kDefaultBufferSize \
    }

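// Each INIT expands to {kVertexBufferUsage, kDefaultBufferSize}: it aggregate-initializes one
// vk::DynamicBuffer element of mDefaultAttribBuffers in the constructor below, one buffer per
// vertex attribute slot, used to stream current-value (default) attribute data.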
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      vk::Context(renderer),
      mCurrentDrawMode(gl::PrimitiveMode::InvalidEnum),
      mTexturesDirty(false),
      mVertexArrayBindingHasChanged(false),
      mClearColorMask(kAllColorChannelsMask),
      mFlipYForCurrentSurface(false),
      mDriverUniformsBuffer(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, sizeof(DriverUniforms) * 16),
      mDriverUniformsDescriptorSet(VK_NULL_HANDLE),
      mDefaultAttribBuffers{{INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT,
                             INIT, INIT, INIT, INIT}}
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk() = default;

void ContextVk::onDestroy(const gl::Context *context)
{
    mDriverUniformsSetLayout.reset();
    mIncompleteTextures.onDestroy(context);
    mDriverUniformsBuffer.destroy(getDevice());

    for (vk::DynamicDescriptorPool &descriptorPool : mDynamicDescriptorPools)
    {
        descriptorPool.destroy(getDevice());
    }

    for (vk::DynamicBuffer &defaultBuffer : mDefaultAttribBuffers)
    {
        defaultBuffer.destroy(getDevice());
    }
}

gl::Error ContextVk::getIncompleteTexture(const gl::Context *context,
                                          gl::TextureType type,
                                          gl::Texture **textureOut)
{
    // At some point, we'll need to support multisampling and we'll pass "this" instead of nullptr
    // and implement the necessary interface.
    return mIncompleteTextures.getIncompleteTexture(context, type, nullptr, textureOut);
}

gl::Error ContextVk::initialize()
{
    // Note that this may reserve more sets than strictly necessary for a particular layout.
    VkDescriptorPoolSize uniformPoolSize = {
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
        GetUniformBufferDescriptorCount() * vk::kDefaultDescriptorPoolMaxSets};

    ANGLE_TRY(mDynamicDescriptorPools[kUniformsDescriptorSetIndex].init(this, uniformPoolSize));

    VkDescriptorPoolSize imageSamplerPoolSize = {
        VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        mRenderer->getMaxActiveTextures() * vk::kDefaultDescriptorPoolMaxSets};
    ANGLE_TRY(mDynamicDescriptorPools[kTextureDescriptorSetIndex].init(this, imageSamplerPoolSize));

    VkDescriptorPoolSize driverUniformsPoolSize = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                                                   vk::kDefaultDescriptorPoolMaxSets};
    ANGLE_TRY(mDynamicDescriptorPools[kDriverUniformsDescriptorSetIndex].init(
        this, driverUniformsPoolSize));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    // Initialize current value/default attribute buffers.
    for (vk::DynamicBuffer &buffer : mDefaultAttribBuffers)
    {
        buffer.init(1, mRenderer);
    }

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Multiple flushes will need to insert semaphores. http://anglebug.com/2504

    // dEQP tests rely on having no errors thrown at the end of the test, and they always call
    // flush at the end of their tests. Just returning NoError until we implement flush allows
    // us to work on enabling many tests in the meantime.
    WARN() << "Flush is unimplemented. http://anglebug.com/2504";
    return gl::NoError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(this);
}

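// Builds the VkPipeline used by the next draw call. The pipeline is keyed off mPipelineDesc
// (primitive topology, vertex input state, render pass description and shader serials), allowing
// RendererVk to reuse a matching pipeline if one has already been created for this description.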
gl::Error ContextVk::initPipeline(const gl::DrawCallParams &drawCallParams)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc());

    // Trigger draw call shader patching and fill out the pipeline desc.
    const vk::ShaderAndSerial *vertexShaderAndSerial = nullptr;
    const vk::ShaderAndSerial *fragmentShaderAndSerial = nullptr;
    const vk::PipelineLayout *pipelineLayout = nullptr;
    ANGLE_TRY(programVk->initShaders(this, drawCallParams, &vertexShaderAndSerial,
                                     &fragmentShaderAndSerial, &pipelineLayout));

    mPipelineDesc->updateShaders(vertexShaderAndSerial->getSerial(),
                                 fragmentShaderAndSerial->getSerial());

    ANGLE_TRY(mRenderer->getPipeline(this, *vertexShaderAndSerial, *fragmentShaderAndSerial,
                                     *pipelineLayout, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

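// Common per-draw setup shared by drawArrays and drawElements: refresh dirty default attributes,
// (re)create the pipeline if needed, fetch the draw framebuffer's command buffer, flush pending
// writes to sampled textures, bind the pipeline and update the graphics descriptor sets.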
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               const gl::DrawCallParams &drawCallParams,
                               vk::CommandBuffer **commandBufferOut,
                               bool *shouldApplyVertexArrayOut)
{
    if (drawCallParams.mode() != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = drawCallParams.mode();
    }

    if (mDirtyDefaultAttribs.any())
    {
        ANGLE_TRY(updateDefaultAttributes());
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(drawCallParams));
    }

    const auto &state = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk = vk::GetImpl(programGL);
    const gl::Framebuffer *framebuffer = state.getDrawFramebuffer();
    FramebufferVk *framebufferVk = vk::GetImpl(framebuffer);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();

    vk::RecordingMode mode = vk::RecordingMode::Start;
    ANGLE_TRY(framebufferVk->getCommandBufferForDraw(this, commandBufferOut, &mode));

    if (mode == vk::RecordingMode::Start)
    {
        mTexturesDirty = true;
        *shouldApplyVertexArrayOut = true;
    }
    else
    {
        *shouldApplyVertexArrayOut = mVertexArrayBindingHasChanged;
        mVertexArrayBindingHasChanged = false;
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;

        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        for (size_t textureIndex : state.getActiveTexturesMask())
        {
            TextureVk *textureVk = mActiveTextures[textureIndex];
            ANGLE_TRY(textureVk->ensureImageInitialized(this));
            textureVk->addReadDependency(framebufferVk);
        }
    }

    (*commandBufferOut)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // Bind the graphics descriptor sets.
    ANGLE_TRY(programVk->updateDescriptorSets(this, drawCallParams, mDriverUniformsDescriptorSet,
                                              *commandBufferOut));
    return gl::NoError();
}

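// Front-end GLES calls such as
//   glUseProgram(program);
//   glBindVertexArray(vao);
//   glDrawArrays(GL_TRIANGLES, 0, 3);
// end up here: the draw is recorded into the draw framebuffer's command buffer via setupDraw and
// VertexArrayVk rather than being submitted to the Vulkan queue immediately.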
gl::Error ContextVk::drawArrays(const gl::Context *context,
                                gl::PrimitiveMode mode,
                                GLint first,
                                GLsizei count)
{
    const gl::DrawCallParams &drawCallParams = context->getParams<gl::DrawCallParams>();

    vk::CommandBuffer *commandBuffer = nullptr;
    bool shouldApplyVertexArray = false;
    ANGLE_TRY(setupDraw(context, drawCallParams, &commandBuffer, &shouldApplyVertexArray));

    const gl::VertexArray *vertexArray = context->getGLState().getVertexArray();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(vertexArray);
    ANGLE_TRY(
        vertexArrayVk->drawArrays(context, drawCallParams, commandBuffer, shouldApplyVertexArray));

    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         gl::PrimitiveMode mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawElements(const gl::Context *context,
                                  gl::PrimitiveMode mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    const gl::DrawCallParams &drawCallParams = context->getParams<gl::DrawCallParams>();

    vk::CommandBuffer *commandBuffer = nullptr;
    bool shouldApplyVertexArray = false;
    ANGLE_TRY(setupDraw(context, drawCallParams, &commandBuffer, &shouldApplyVertexArray));

    gl::VertexArray *vao = mState.getState().getVertexArray();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(vao);
    ANGLE_TRY(vertexArrayVk->drawElements(context, drawCallParams, commandBuffer,
                                          shouldApplyVertexArray));

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           gl::PrimitiveMode mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       gl::PrimitiveMode mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        gl::PrimitiveMode mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          gl::PrimitiveMode mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

bool ContextVk::isViewportFlipEnabledForDrawFBO() const
{
    return mFlipViewportForDrawFramebuffer && mFlipYForCurrentSurface;
}

bool ContextVk::isViewportFlipEnabledForReadFBO() const
{
    return mFlipViewportForReadFramebuffer;
}

void ContextVk::updateColorMask(const gl::BlendState &blendState)
{
    mClearColorMask =
        gl_vk::GetColorComponentFlags(blendState.colorMaskRed, blendState.colorMaskGreen,
                                      blendState.colorMaskBlue, blendState.colorMaskAlpha);

    FramebufferVk *framebufferVk = vk::GetImpl(mState.getState().getDrawFramebuffer());
    mPipelineDesc->updateColorWriteMask(mClearColorMask,
                                        framebufferVk->getEmulatedAlphaAttachmentMask());
}

void ContextVk::updateScissor(const gl::State &glState) const
{
    FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
    gl::Box dimensions = framebufferVk->getState().getDimensions();
    gl::Rectangle renderArea(0, 0, dimensions.width, dimensions.height);

    if (glState.isScissorTestEnabled())
    {
        mPipelineDesc->updateScissor(glState.getScissor(), isViewportFlipEnabledForDrawFBO(),
                                     renderArea);
    }
    else
    {
        // If the scissor test isn't enabled, we can simply use a really big scissor that's
        // certainly larger than the current surface: the maximum size of a 2D texture for the
        // width and height.
        mPipelineDesc->updateScissor(kMaxSizedScissor, isViewportFlipEnabledForDrawFBO(),
                                     renderArea);
    }
}

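// Translates front-end dirty bits into Vulkan state: most bits simply patch mPipelineDesc (any
// change also invalidates the current pipeline handle), while texture, sampler and program bits
// mark the active textures dirty so setupDraw refreshes descriptor sets on the next draw.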
gl::Error ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
            case gl::State::DIRTY_BIT_SCISSOR:
                updateScissor(glState);
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
            {
                FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
                mPipelineDesc->updateViewport(framebufferVk, glState.getViewport(),
                                              glState.getNearPlane(), glState.getFarPlane(),
                                              isViewportFlipEnabledForDrawFBO());
                ANGLE_TRY(updateDriverUniforms(glState));
                break;
            }
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                mPipelineDesc->updateDepthRange(glState.getNearPlane(), glState.getFarPlane());
                ANGLE_TRY(updateDriverUniforms(glState));
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                updateColorMask(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState(),
                                                      glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                mPipelineDesc->updateDepthWriteEnabled(glState.getDepthStencilState(),
                                                       glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                mPipelineDesc->updateStencilTestEnabled(glState.getDepthStencilState(),
                                                        glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                mPipelineDesc->updateStencilFrontFuncs(glState.getStencilRef(),
                                                       glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                mPipelineDesc->updateStencilBackFuncs(glState.getStencilBackRef(),
                                                      glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                mPipelineDesc->updateStencilFrontOps(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                mPipelineDesc->updateStencilBackOps(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                mPipelineDesc->updateStencilFrontWriteMask(glState.getDepthStencilState(),
                                                           glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                mPipelineDesc->updateStencilBackWriteMask(glState.getDepthStencilState(),
                                                          glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState(),
                                               isViewportFlipEnabledForDrawFBO());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                // This is a no-op; it's only important to use the right unpack state when we do
                // setImage or setSubImage in TextureVk, which is plumbed through the frontend call.
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                // This is a no-op; it's only important to use the right pack state when we
                // call readPixels later on.
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                updateFlipViewportReadFramebuffer(context->getGLState());
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
            {
                ANGLE_TRY(updateDriverUniforms(glState));
                updateFlipViewportDrawFramebuffer(glState);
                FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
                mPipelineDesc->updateViewport(framebufferVk, glState.getViewport(),
                                              glState.getNearPlane(), glState.getFarPlane(),
                                              isViewportFlipEnabledForDrawFBO());
                updateColorMask(glState.getBlendState());
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                updateScissor(glState);
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState(),
                                                      glState.getDrawFramebuffer());
                mPipelineDesc->updateDepthWriteEnabled(glState.getDepthStencilState(),
                                                       glState.getDrawFramebuffer());
                mPipelineDesc->updateStencilTestEnabled(glState.getDepthStencilState(),
                                                        glState.getDrawFramebuffer());
                mPipelineDesc->updateStencilFrontWriteMask(glState.getDepthStencilState(),
                                                           glState.getDrawFramebuffer());
                mPipelineDesc->updateStencilBackWriteMask(glState.getDepthStencilState(),
                                                          glState.getDrawFramebuffer());
                break;
            }
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
            {
                mVertexArrayBindingHasChanged = true;

                // Note that we should implement faster dirty bits for VAO changes in ES 3.0.
                // This might require keeping separate dirty info for the data and state.
                mDirtyDefaultAttribs.set();
                break;
            }
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                dirtyTextures = true;
                // No additional work is needed here. We will update the pipeline desc later.
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
            {
                for (size_t attribIndex : glState.getAndResetDirtyCurrentValues())
                {
                    invalidateDefaultAttribute(attribIndex);
                }
                break;
            }
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ANGLE_TRY(updateActiveTextures(context));

        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }

    return gl::NoError();
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

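// Called when this context is bound to a surface. Vulkan's window surface origin is top-left
// while GL's default framebuffer origin is bottom-left, so when the flipViewportY feature is
// enabled we track whether rendering to the current surface needs a Y flip.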
gl::Error ContextVk::onMakeCurrent(const gl::Context *context)
{
    // Flip viewports if FeaturesVk::flipViewportY is enabled and the user did not request that the
    // surface is flipped.
    egl::Surface *drawSurface = context->getCurrentDrawSurface();
    mFlipYForCurrentSurface =
        drawSurface != nullptr && mRenderer->getFeatures().flipViewportY &&
        !IsMaskFlagSet(drawSurface->getOrientation(), EGL_SURFACE_ORIENTATION_INVERT_Y_ANGLE);

    const gl::State &glState = context->getGLState();
    updateFlipViewportDrawFramebuffer(glState);
    updateFlipViewportReadFramebuffer(glState);
    ANGLE_TRY(updateDriverUniforms(glState));
    return gl::NoError();
}

void ContextVk::updateFlipViewportDrawFramebuffer(const gl::State &glState)
{
    gl::Framebuffer *drawFramebuffer = glState.getDrawFramebuffer();
    mFlipViewportForDrawFramebuffer =
        drawFramebuffer->isDefault() && mRenderer->getFeatures().flipViewportY;
}

void ContextVk::updateFlipViewportReadFramebuffer(const gl::State &glState)
{
    gl::Framebuffer *readFramebuffer = glState.getReadFramebuffer();
    mFlipViewportForReadFramebuffer =
        readFramebuffer->isDefault() && mRenderer->getFeatures().flipViewportY;
}

gl::Caps ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(mRenderer, state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state, mRenderer);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state, mRenderer);
}

QueryImpl *ContextVk::createQuery(gl::QueryType type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DynamicDescriptorPool *ContextVk::getDynamicDescriptorPool(uint32_t descriptorSetIndex)
{
    return &mDynamicDescriptorPools[descriptorSetIndex];
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

VkColorComponentFlags ContextVk::getClearColorMask() const
{
    return mClearColorMask;
}

const FeaturesVk &ContextVk::getFeatures() const
{
    return mRenderer->getFeatures();
}

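// Uploads the per-draw "driver uniforms" consumed by translated shaders. Judging by the brace
// initializer below, DriverUniforms (presumably declared alongside ContextVk) holds three
// float[4] members: the viewport rectangle, a viewport scale factor used for Y flipping, and the
// depth range (near, far, far - near). The data lives in a dynamic uniform buffer and is bound
// through its own descriptor set, separate from the application's uniforms and textures.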
angle::Result ContextVk::updateDriverUniforms(const gl::State &glState)
{
    if (!mDriverUniformsBuffer.valid())
    {
        size_t minAlignment = static_cast<size_t>(
            mRenderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);
        mDriverUniformsBuffer.init(minAlignment, mRenderer);
    }

    // Release any previously retained buffers.
    mDriverUniformsBuffer.releaseRetainedBuffers(mRenderer);

    const gl::Rectangle &glViewport = glState.getViewport();

    // Allocate a new region in the dynamic buffer.
    uint8_t *ptr = nullptr;
    VkBuffer buffer = VK_NULL_HANDLE;
    uint32_t offset = 0;
    bool newBufferAllocated = false;
    ANGLE_TRY(mDriverUniformsBuffer.allocate(this, sizeof(DriverUniforms), &ptr, &buffer, &offset,
                                             &newBufferAllocated));
    float scaleY = isViewportFlipEnabledForDrawFBO() ? 1.0f : -1.0f;

    float depthRangeNear = glState.getNearPlane();
    float depthRangeFar = glState.getFarPlane();
    float depthRangeDiff = depthRangeFar - depthRangeNear;

    // Copy and flush to the device.
    DriverUniforms *driverUniforms = reinterpret_cast<DriverUniforms *>(ptr);
    *driverUniforms = {
        {static_cast<float>(glViewport.x), static_cast<float>(glViewport.y),
         static_cast<float>(glViewport.width), static_cast<float>(glViewport.height)},
        {1.0f, scaleY, 1.0f, 1.0f},
        {depthRangeNear, depthRangeFar, depthRangeDiff, 0.0f}};

    ANGLE_TRY(mDriverUniformsBuffer.flush(this));

    // Get the descriptor set layout.
    if (!mDriverUniformsSetLayout.valid())
    {
        vk::DescriptorSetLayoutDesc desc;
        desc.update(0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1);

        ANGLE_TRY(mRenderer->getDescriptorSetLayout(this, desc, &mDriverUniformsSetLayout));
    }

    // Allocate a new descriptor set.
    ANGLE_TRY(mDynamicDescriptorPools[kDriverUniformsDescriptorSetIndex].allocateSets(
        this, mDriverUniformsSetLayout.get().ptr(), 1, &mDriverUniformsDescriptorSet));

    // Update the driver uniform descriptor set.
    VkDescriptorBufferInfo bufferInfo;
    bufferInfo.buffer = buffer;
    bufferInfo.offset = offset;
    bufferInfo.range = sizeof(DriverUniforms);

    VkWriteDescriptorSet writeInfo;
    writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writeInfo.pNext = nullptr;
    writeInfo.dstSet = mDriverUniformsDescriptorSet;
    writeInfo.dstBinding = 0;
    writeInfo.dstArrayElement = 0;
    writeInfo.descriptorCount = 1;
    writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    writeInfo.pImageInfo = nullptr;
    writeInfo.pTexelBufferView = nullptr;
    writeInfo.pBufferInfo = &bufferInfo;

    vkUpdateDescriptorSets(getDevice(), 1, &writeInfo, 0, nullptr);

    return angle::Result::Continue();
}

void ContextVk::handleError(VkResult errorCode, const char *file, unsigned int line)
{
    GLenum glErrorCode = DefaultGLErrorCode(errorCode);

    std::stringstream errorStream;
    errorStream << "Internal Vulkan error: " << VulkanResultString(errorCode) << ", in " << file
                << ", line " << line << ".";

    mErrors->handleError(gl::Error(glErrorCode, glErrorCode, errorStream.str()));
}

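// Resolves the program's sampler bindings to backend textures. Entries that are null in the
// front-end's complete-texture cache are replaced with an incomplete texture, matching GLES
// incomplete-texture sampling rules.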
gl::Error ContextVk::updateActiveTextures(const gl::Context *context)
{
    const auto &completeTextures = mState.getState().getCompleteTextureCache();
    const gl::Program *program = mState.getState().getProgram();

    mActiveTextures.fill(nullptr);

    for (const gl::SamplerBinding &samplerBinding : program->getSamplerBindings())
    {
        ASSERT(!samplerBinding.unreferenced);

        for (GLuint textureUnit : samplerBinding.boundTextureUnits)
        {
            gl::Texture *texture = completeTextures[textureUnit];

            // Null textures represent incomplete textures.
            if (texture == nullptr)
            {
                ANGLE_TRY(getIncompleteTexture(context, samplerBinding.textureType, &texture));
            }

            mActiveTextures[textureUnit] = vk::GetImpl(texture);
        }
    }

    return gl::NoError();
}

const gl::ActiveTextureArray<TextureVk *> &ContextVk::getActiveTextures() const
{
    return mActiveTextures;
}

void ContextVk::invalidateDefaultAttribute(size_t attribIndex)
{
    mDirtyDefaultAttribs.set(attribIndex);
}

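// Streams GL "current value" (default) vertex attributes into the per-attribute dynamic buffers.
// Only attributes that are both dirty and active in the current program are refreshed, and only
// GL_FLOAT current values are handled (see the ASSERT below); the resulting buffer handle and
// offset are handed to VertexArrayVk for binding at draw time.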
angle::Result ContextVk::updateDefaultAttributes()
{
    ASSERT(mDirtyDefaultAttribs.any());

    const gl::Program *program = mState.getState().getProgram();
    ASSERT(program);

    const gl::AttributesMask &programAttribs = program->getActiveAttribLocationsMask();
    const gl::AttributesMask &attribsToUpdate = (programAttribs & mDirtyDefaultAttribs);

    for (size_t attribIndex : attribsToUpdate)
    {
        ANGLE_TRY(updateDefaultAttribute(attribIndex));
    }

    mDirtyDefaultAttribs &= ~attribsToUpdate;
    return angle::Result::Continue();
}

angle::Result ContextVk::updateDefaultAttribute(size_t attribIndex)
{
    vk::DynamicBuffer &defaultBuffer = mDefaultAttribBuffers[attribIndex];

    defaultBuffer.releaseRetainedBuffers(mRenderer);

    uint8_t *ptr;
    VkBuffer bufferHandle = VK_NULL_HANDLE;
    uint32_t offset = 0;
    ANGLE_TRY(
        defaultBuffer.allocate(this, kDefaultValueSize, &ptr, &bufferHandle, &offset, nullptr));

    const gl::State &glState = mState.getState();
    const gl::VertexAttribCurrentValueData &defaultValue =
        glState.getVertexAttribCurrentValues()[attribIndex];

    ASSERT(defaultValue.Type == GL_FLOAT);

    memcpy(ptr, defaultValue.FloatValues, kDefaultValueSize);

    ANGLE_TRY(defaultBuffer.flush(this));

    VertexArrayVk *vertexArrayVk = vk::GetImpl(glState.getVertexArray());
    vertexArrayVk->updateDefaultAttrib(mRenderer, attribIndex, bufferHandle, offset);
    return angle::Result::Continue();
}
}  // namespace rx