//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/Surface.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"

namespace rx
{

namespace
{
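// Maps a Vulkan result code to a default GL error: out-of-memory conditions map to
// GL_OUT_OF_MEMORY, everything else to GL_INVALID_OPERATION.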
GLenum DefaultGLErrorCode(VkResult result)
{
    switch (result)
    {
        case VK_ERROR_OUT_OF_HOST_MEMORY:
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
        case VK_ERROR_TOO_MANY_OBJECTS:
            return GL_OUT_OF_MEMORY;
        default:
            return GL_INVALID_OPERATION;
    }
}

void BindNonNullVertexBufferRanges(vk::CommandBuffer *commandBuffer,
                                   const gl::AttributesMask &nonNullAttribMask,
                                   uint32_t maxAttrib,
                                   const gl::AttribArray<VkBuffer> &arrayBufferHandles,
                                   const gl::AttribArray<VkDeviceSize> &arrayBufferOffsets)
{
    // Vulkan does not allow binding a null vertex buffer, but a null buffer binding is valid
    // default GL state.

    // We can detect that there are no gaps in the active attributes by using the program's
    // attribute mask and the max enabled attribute.
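    // For example, with maxAttrib == 4 and buffer handles {A, B, VK_NULL_HANDLE, C} (A, B and C
    // standing in for arbitrary non-null buffers), location 2 is a gap, so the loop below issues
    // two bindVertexBuffers calls: one for the range [0, 2) and one for [3, 4).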
    ASSERT(maxAttrib > 0);
    if (nonNullAttribMask.to_ulong() == ((1u << maxAttrib) - 1))
    {
        commandBuffer->bindVertexBuffers(0, maxAttrib, arrayBufferHandles.data(),
                                         arrayBufferOffsets.data());
        return;
    }

    // Find ranges of non-null buffers and bind them all together.
    for (uint32_t attribIdx = 0; attribIdx < maxAttrib; attribIdx++)
    {
        if (arrayBufferHandles[attribIdx] != VK_NULL_HANDLE)
        {
            // Find the end of this range of non-null handles.
            uint32_t rangeCount = 1;
            while (attribIdx + rangeCount < maxAttrib &&
                   arrayBufferHandles[attribIdx + rangeCount] != VK_NULL_HANDLE)
            {
                rangeCount++;
            }

            commandBuffer->bindVertexBuffers(attribIdx, rangeCount, &arrayBufferHandles[attribIdx],
                                             &arrayBufferOffsets[attribIdx]);
            attribIdx += rangeCount;
        }
    }
}

constexpr gl::Rectangle kMaxSizedScissor(0,
                                         0,
                                         std::numeric_limits<int>::max(),
                                         std::numeric_limits<int>::max());

constexpr VkColorComponentFlags kAllColorChannelsMask =
    (VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT |
     VK_COLOR_COMPONENT_A_BIT);

constexpr VkBufferUsageFlags kVertexBufferUsage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
constexpr size_t kDefaultValueSize              = sizeof(float) * 4;
constexpr size_t kDefaultBufferSize             = kDefaultValueSize * 16;
}  // anonymous namespace

// std::array only supports aggregate init. Thus we use a helper macro to reduce code duplication.
#define INIT                                   \
    {                                          \
        kVertexBufferUsage, kDefaultBufferSize \
    }
111
Jamie Madillacccc6c2016-05-03 17:22:10 -0400112ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
Jamie Madill49ac74b2017-12-21 14:42:33 -0500113 : ContextImpl(state),
Jamie Madill21061022018-07-12 23:56:30 -0400114 vk::Context(renderer),
Jamie Madill493f9572018-05-24 19:52:15 -0400115 mCurrentDrawMode(gl::PrimitiveMode::InvalidEnum),
Jamie Madill37386b02018-08-30 16:18:37 -0400116 mVertexArray(nullptr),
117 mDrawFramebuffer(nullptr),
118 mProgram(nullptr),
Jamie Madill88fc6da2018-08-30 16:18:36 -0400119 mLastIndexBufferOffset(0),
120 mCurrentDrawElementsType(GL_NONE),
Geoff Langcaa55cd2018-07-05 13:19:35 -0400121 mClearColorMask(kAllColorChannelsMask),
Jamie Madill834a3a12018-07-09 13:32:39 -0400122 mFlipYForCurrentSurface(false),
123 mDriverUniformsBuffer(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, sizeof(DriverUniforms) * 16),
Jamie Madill5a4c9322018-07-16 11:01:58 -0400124 mDriverUniformsDescriptorSet(VK_NULL_HANDLE),
125 mDefaultAttribBuffers{{INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT, INIT,
126 INIT, INIT, INIT, INIT}}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400127{
Jamie Madillf4d693c2018-02-14 16:38:16 -0500128 memset(&mClearColorValue, 0, sizeof(mClearColorValue));
129 memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
Jamie Madillef3b9b42018-08-30 16:18:38 -0400130
131 mNonIndexedDirtyBitsMask.set();
132 mNonIndexedDirtyBitsMask.reset(DIRTY_BIT_INDEX_BUFFER);
133
134 mIndexedDirtyBitsMask.set();
135
136 mNewCommandBufferDirtyBits.set(DIRTY_BIT_PIPELINE);
137 mNewCommandBufferDirtyBits.set(DIRTY_BIT_TEXTURES);
138 mNewCommandBufferDirtyBits.set(DIRTY_BIT_VERTEX_BUFFERS);
139 mNewCommandBufferDirtyBits.set(DIRTY_BIT_INDEX_BUFFER);
140 mNewCommandBufferDirtyBits.set(DIRTY_BIT_DESCRIPTOR_SETS);
Jamie Madill2b858c22018-09-03 13:58:14 -0400141
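    // Map each dirty bit to its handler. setupDraw() walks the dirty bits that apply to the
    // current draw call and dispatches to the corresponding member function through this table.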
    mDirtyBitHandlers[DIRTY_BIT_DEFAULT_ATTRIBS] = &ContextVk::handleDirtyDefaultAttribs;
    mDirtyBitHandlers[DIRTY_BIT_PIPELINE]        = &ContextVk::handleDirtyPipeline;
    mDirtyBitHandlers[DIRTY_BIT_TEXTURES]        = &ContextVk::handleDirtyTextures;
    mDirtyBitHandlers[DIRTY_BIT_VERTEX_BUFFERS]  = &ContextVk::handleDirtyVertexBuffers;
    mDirtyBitHandlers[DIRTY_BIT_INDEX_BUFFER]    = &ContextVk::handleDirtyIndexBuffer;
    mDirtyBitHandlers[DIRTY_BIT_DRIVER_UNIFORMS] = &ContextVk::handleDirtyDriverUniforms;
    mDirtyBitHandlers[DIRTY_BIT_DESCRIPTOR_SETS] = &ContextVk::handleDirtyDescriptorSets;

    mDirtyBits = mNewCommandBufferDirtyBits;
}

#undef INIT

ContextVk::~ContextVk() = default;

void ContextVk::onDestroy(const gl::Context *context)
{
    mDriverUniformsSetLayout.reset();
    mIncompleteTextures.onDestroy(context);
    mDriverUniformsBuffer.destroy(getDevice());
    mDriverUniformsDescriptorPoolBinding.reset();

    for (vk::DynamicDescriptorPool &descriptorPool : mDynamicDescriptorPools)
    {
        descriptorPool.destroy(getDevice());
    }

    for (vk::DynamicBuffer &defaultBuffer : mDefaultAttribBuffers)
    {
        defaultBuffer.destroy(getDevice());
    }

    for (vk::DynamicQueryPool &queryPool : mQueryPools)
    {
        queryPool.destroy(getDevice());
    }
}

gl::Error ContextVk::getIncompleteTexture(const gl::Context *context,
                                          gl::TextureType type,
                                          gl::Texture **textureOut)
{
    // At some point, we'll need to support multisample and we'll pass "this" instead of nullptr
    // and implement the necessary interface.
    return mIncompleteTextures.getIncompleteTexture(context, type, nullptr, textureOut);
}

gl::Error ContextVk::initialize()
{
    // Note that this may reserve more sets than strictly necessary for a particular layout.
    ANGLE_TRY(mDynamicDescriptorPools[kUniformsDescriptorSetIndex].init(
        this, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, GetUniformBufferDescriptorCount()));
    ANGLE_TRY(mDynamicDescriptorPools[kTextureDescriptorSetIndex].init(
        this, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, mRenderer->getMaxActiveTextures()));
    ANGLE_TRY(mDynamicDescriptorPools[kDriverUniformsDescriptorSetIndex].init(
        this, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1));

    ANGLE_TRY(mQueryPools[gl::QueryType::AnySamples].init(this, VK_QUERY_TYPE_OCCLUSION,
                                                          vk::kDefaultOcclusionQueryPoolSize));
    ANGLE_TRY(mQueryPools[gl::QueryType::AnySamplesConservative].init(
        this, VK_QUERY_TYPE_OCCLUSION, vk::kDefaultOcclusionQueryPoolSize));
    // TODO(syoussefi): Initialize other query pools as they get implemented.

    size_t minAlignment = static_cast<size_t>(
        mRenderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);
    mDriverUniformsBuffer.init(minAlignment, mRenderer);

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    // Initialize current value/default attribute buffers.
    for (vk::DynamicBuffer &buffer : mDefaultAttribBuffers)
    {
        buffer.init(1, mRenderer);
    }

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Multiple flushes will need to insert semaphores. http://anglebug.com/2504

    // dEQP tests rely on having no errors thrown at the end of the test, and they always call
    // flush at the end of their tests. Returning NoError here until we implement flush allows us
    // to keep working on enabling many tests in the meantime.
    WARN() << "Flush is unimplemented. http://anglebug.com/2504";
    return gl::NoError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(this);
}

angle::Result ContextVk::initPipeline(const gl::DrawCallParams &drawCallParams)
{
    ASSERT(!mCurrentPipeline);

    const gl::AttributesMask activeAttribLocationsMask =
        mProgram->getState().getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    mVertexArray->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(mDrawFramebuffer->getRenderPassDesc());

    // Trigger draw call shader patching and fill out the pipeline desc.
    const vk::ShaderAndSerial *vertexShaderAndSerial   = nullptr;
    const vk::ShaderAndSerial *fragmentShaderAndSerial = nullptr;
    const vk::PipelineLayout *pipelineLayout           = nullptr;
    ANGLE_TRY(mProgram->initShaders(this, drawCallParams, &vertexShaderAndSerial,
                                    &fragmentShaderAndSerial, &pipelineLayout));

    mPipelineDesc->updateShaders(vertexShaderAndSerial->getSerial(),
                                 fragmentShaderAndSerial->getSerial());

    ANGLE_TRY(mRenderer->getPipeline(this, *vertexShaderAndSerial, *fragmentShaderAndSerial,
                                     *pipelineLayout, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return angle::Result::Continue();
}

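// Common setup for every draw call: sets the dirty bits that depend on the draw call parameters,
// makes sure a render pass is started (marking all new-command-buffer bits dirty when one is),
// and then runs the handlers for the dirty bits selected by |dirtyBitMask|.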
angle::Result ContextVk::setupDraw(const gl::Context *context,
                                   const gl::DrawCallParams &drawCallParams,
                                   DirtyBits dirtyBitMask,
                                   vk::CommandBuffer **commandBufferOut)
{
    // Set any dirty bits that depend on draw call parameters or other objects.
    if (drawCallParams.mode() != mCurrentDrawMode)
    {
        mCurrentPipeline = nullptr;
        mDirtyBits.set(DIRTY_BIT_PIPELINE);
        mCurrentDrawMode = drawCallParams.mode();
    }

    if (!mDrawFramebuffer->appendToStartedRenderPass(mRenderer, commandBufferOut))
    {
        ANGLE_TRY(mDrawFramebuffer->startNewRenderPass(this, commandBufferOut));
        mDirtyBits |= mNewCommandBufferDirtyBits;
    }

    if (context->getStateCache().hasAnyActiveClientAttrib())
    {
        ANGLE_TRY(mVertexArray->updateClientAttribs(context, drawCallParams));
        mDirtyBits.set(DIRTY_BIT_VERTEX_BUFFERS);
    }

    if (mProgram->dirtyUniforms())
    {
        ANGLE_TRY(mProgram->updateUniforms(this));
        mDirtyBits.set(DIRTY_BIT_DESCRIPTOR_SETS);
    }

    DirtyBits dirtyBits = mDirtyBits & dirtyBitMask;

    if (dirtyBits.none())
        return angle::Result::Continue();

    // Flush any relevant dirty bits.
    for (size_t dirtyBit : dirtyBits)
    {
        mDirtyBits.reset(dirtyBit);
        ANGLE_TRY((this->*mDirtyBitHandlers[dirtyBit])(context, drawCallParams, *commandBufferOut));
    }

    return angle::Result::Continue();
}

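// Indexed draws additionally track the element type and index buffer offset. Draws without an
// element array buffer, and GL_UNSIGNED_BYTE draws, go through
// VertexArrayVk::updateIndexTranslation, since Vulkan requires index data to live in a buffer and
// has no 8-bit index type.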
angle::Result ContextVk::setupIndexedDraw(const gl::Context *context,
                                          const gl::DrawCallParams &drawCallParams,
                                          vk::CommandBuffer **commandBufferOut)
{
    if (drawCallParams.type() != mCurrentDrawElementsType)
    {
        mDirtyBits.set(DIRTY_BIT_INDEX_BUFFER);
        mCurrentDrawElementsType = drawCallParams.type();
    }

    const gl::Buffer *elementArrayBuffer = mVertexArray->getState().getElementArrayBuffer().get();
    if (!elementArrayBuffer)
    {
        mDirtyBits.set(DIRTY_BIT_INDEX_BUFFER);
        ANGLE_TRY(mVertexArray->updateIndexTranslation(this, drawCallParams));
    }
    else
    {
        if (drawCallParams.indices() != mLastIndexBufferOffset)
        {
            mDirtyBits.set(DIRTY_BIT_INDEX_BUFFER);
            mLastIndexBufferOffset = drawCallParams.indices();
            mVertexArray->updateCurrentElementArrayBufferOffset(mLastIndexBufferOffset);
        }

        if (drawCallParams.type() == GL_UNSIGNED_BYTE && mDirtyBits[DIRTY_BIT_INDEX_BUFFER])
        {
            ANGLE_TRY(mVertexArray->updateIndexTranslation(this, drawCallParams));
        }
    }

    return setupDraw(context, drawCallParams, mIndexedDirtyBitsMask, commandBufferOut);
}

angle::Result ContextVk::setupLineLoopDraw(const gl::Context *context,
                                           const gl::DrawCallParams &drawCallParams,
                                           vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(mVertexArray->handleLineLoop(this, drawCallParams));
    mDirtyBits.set(DIRTY_BIT_INDEX_BUFFER);
    mCurrentDrawElementsType =
        drawCallParams.isDrawElements() ? drawCallParams.type() : GL_UNSIGNED_INT;
    return setupDraw(context, drawCallParams, mIndexedDirtyBitsMask, commandBufferOut);
}

angle::Result ContextVk::handleDirtyDefaultAttribs(const gl::Context *context,
                                                   const gl::DrawCallParams &drawCallParams,
                                                   vk::CommandBuffer *commandBuffer)
{
    ASSERT(mDirtyDefaultAttribsMask.any());

    for (size_t attribIndex : mDirtyDefaultAttribsMask)
    {
        ANGLE_TRY(updateDefaultAttribute(attribIndex));
    }

    mDirtyDefaultAttribsMask.reset();
    return angle::Result::Continue();
}

angle::Result ContextVk::handleDirtyPipeline(const gl::Context *context,
                                             const gl::DrawCallParams &drawCallParams,
                                             vk::CommandBuffer *commandBuffer)
{
    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(drawCallParams));
    }

    commandBuffer->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(mRenderer->getCurrentQueueSerial());
    return angle::Result::Continue();
}

angle::Result ContextVk::handleDirtyTextures(const gl::Context *context,
                                             const gl::DrawCallParams &drawCallParams,
                                             vk::CommandBuffer *commandBuffer)
{
    ANGLE_TRY(updateActiveTextures(context));

    // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
    for (size_t textureIndex : mProgram->getState().getActiveSamplersMask())
    {
        // Ensure any writes to the textures are flushed before we read from them.
        TextureVk *textureVk = mActiveTextures[textureIndex];
        ANGLE_TRY(textureVk->ensureImageInitialized(this));
        textureVk->getImage().addReadDependency(mDrawFramebuffer->getFramebuffer());
    }

    if (mProgram->hasTextures())
    {
        ANGLE_TRY(mProgram->updateTexturesDescriptorSet(this));
    }
    return angle::Result::Continue();
}

angle::Result ContextVk::handleDirtyVertexBuffers(const gl::Context *context,
                                                  const gl::DrawCallParams &drawCallParams,
                                                  vk::CommandBuffer *commandBuffer)
{
    BindNonNullVertexBufferRanges(
        commandBuffer, mProgram->getState().getActiveAttribLocationsMask(),
        mProgram->getState().getMaxActiveAttribLocation(),
        mVertexArray->getCurrentArrayBufferHandles(), mVertexArray->getCurrentArrayBufferOffsets());

    const auto &arrayBufferResources = mVertexArray->getCurrentArrayBufferResources();

    for (size_t attribIndex : context->getStateCache().getActiveBufferedAttribsMask())
    {
        if (arrayBufferResources[attribIndex])
            arrayBufferResources[attribIndex]->addReadDependency(
                mDrawFramebuffer->getFramebuffer());
    }
    return angle::Result::Continue();
}

angle::Result ContextVk::handleDirtyIndexBuffer(const gl::Context *context,
                                                const gl::DrawCallParams &drawCallParams,
                                                vk::CommandBuffer *commandBuffer)
{
    commandBuffer->bindIndexBuffer(mVertexArray->getCurrentElementArrayBufferHandle(),
                                   mVertexArray->getCurrentElementArrayBufferOffset(),
                                   gl_vk::GetIndexType(mCurrentDrawElementsType));

    vk::CommandGraphResource *elementArrayBufferResource =
        mVertexArray->getCurrentElementArrayBufferResource();
    if (elementArrayBufferResource)
    {
        elementArrayBufferResource->addReadDependency(mDrawFramebuffer->getFramebuffer());
    }
    return angle::Result::Continue();
}

angle::Result ContextVk::handleDirtyDescriptorSets(const gl::Context *context,
                                                   const gl::DrawCallParams &drawCallParams,
                                                   vk::CommandBuffer *commandBuffer)
{
    ANGLE_TRY(mProgram->updateDescriptorSets(this, drawCallParams, commandBuffer));

    // Bind the graphics descriptor sets.
    commandBuffer->bindDescriptorSets(
        VK_PIPELINE_BIND_POINT_GRAPHICS, mProgram->getPipelineLayout(),
        kDriverUniformsDescriptorSetIndex, 1, &mDriverUniformsDescriptorSet, 0, nullptr);
    return angle::Result::Continue();
}

angle::Result ContextVk::drawArrays(const gl::Context *context,
                                    gl::PrimitiveMode mode,
                                    GLint first,
                                    GLsizei count)
{
    const gl::DrawCallParams &drawCallParams = context->getParams<gl::DrawCallParams>();

    vk::CommandBuffer *commandBuffer = nullptr;
    uint32_t clampedVertexCount      = drawCallParams.getClampedVertexCount<uint32_t>();

    if (mode == gl::PrimitiveMode::LineLoop)
    {
        ANGLE_TRY(setupLineLoopDraw(context, drawCallParams, &commandBuffer));
        vk::LineLoopHelper::Draw(clampedVertexCount, commandBuffer);
    }
    else
    {
        ANGLE_TRY(setupDraw(context, drawCallParams, mNonIndexedDirtyBitsMask, &commandBuffer));
        commandBuffer->draw(clampedVertexCount, 1, drawCallParams.firstVertex(), 0);
    }

    return angle::Result::Continue();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         gl::PrimitiveMode mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawElements(const gl::Context *context,
                                  gl::PrimitiveMode mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    const gl::DrawCallParams &drawCallParams = context->getParams<gl::DrawCallParams>();

    vk::CommandBuffer *commandBuffer = nullptr;
    if (mode == gl::PrimitiveMode::LineLoop)
    {
        ANGLE_TRY(setupLineLoopDraw(context, drawCallParams, &commandBuffer));
        vk::LineLoopHelper::Draw(count, commandBuffer);
    }
    else
    {
        ANGLE_TRY(setupIndexedDraw(context, drawCallParams, &commandBuffer));
        commandBuffer->drawIndexed(count, 1, 0, 0, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           gl::PrimitiveMode mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       gl::PrimitiveMode mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        gl::PrimitiveMode mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawArraysIndirect hasn't been implemented for the Vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          gl::PrimitiveMode mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for the Vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    if (mRenderer->isDeviceLost())
    {
        // TODO(geofflang): It may be possible to track which context caused the device loss and
        // return either GL_GUILTY_CONTEXT_RESET or GL_INNOCENT_CONTEXT_RESET.
        // http://anglebug.com/2787
        return GL_UNKNOWN_CONTEXT_RESET;
    }

    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    // TODO: Forward this to a Vulkan debug marker. http://anglebug.com/2853
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    // TODO: Forward this to a Vulkan debug marker. http://anglebug.com/2853
}

void ContextVk::popGroupMarker()
{
    // TODO: Forward this to a Vulkan debug marker. http://anglebug.com/2853
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    // TODO: Forward this to a Vulkan debug marker. http://anglebug.com/2853
}

void ContextVk::popDebugGroup()
{
    // TODO: Forward this to a Vulkan debug marker. http://anglebug.com/2853
}

bool ContextVk::isViewportFlipEnabledForDrawFBO() const
{
    return mFlipViewportForDrawFramebuffer && mFlipYForCurrentSurface;
}

bool ContextVk::isViewportFlipEnabledForReadFBO() const
{
    return mFlipViewportForReadFramebuffer;
}

void ContextVk::updateColorMask(const gl::BlendState &blendState)
{
    mClearColorMask =
        gl_vk::GetColorComponentFlags(blendState.colorMaskRed, blendState.colorMaskGreen,
                                      blendState.colorMaskBlue, blendState.colorMaskAlpha);

    FramebufferVk *framebufferVk = vk::GetImpl(mState.getState().getDrawFramebuffer());
    mPipelineDesc->updateColorWriteMask(mClearColorMask,
                                        framebufferVk->getEmulatedAlphaAttachmentMask());
}

void ContextVk::updateScissor(const gl::State &glState) const
{
    FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
    gl::Box dimensions           = framebufferVk->getState().getDimensions();
    gl::Rectangle renderArea(0, 0, dimensions.width, dimensions.height);

    if (glState.isScissorTestEnabled())
    {
        mPipelineDesc->updateScissor(glState.getScissor(), isViewportFlipEnabledForDrawFBO(),
                                     renderArea);
    }
    else
    {
Luc Ferron14f48172018-04-11 08:43:28 -0400650 // If the scissor test isn't enabled, we can simply use a really big scissor that's
651 // certainly larger than the current surface using the maximum size of a 2D texture
652 // for the width and height.
        mPipelineDesc->updateScissor(kMaxSizedScissor, isViewportFlipEnabledForDrawFBO(),
                                     renderArea);
    }
}

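// Translates front-end (gl::State) dirty bits into updates of the packed pipeline description and
// into back-end dirty bits that are consumed on the next draw call.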
angle::Result ContextVk::syncState(const gl::Context *context,
                                   const gl::State::DirtyBits &dirtyBits,
                                   const gl::State::DirtyBits &bitMask)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const gl::State &glState = context->getGLState();

    for (size_t dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
            case gl::State::DIRTY_BIT_SCISSOR:
                updateScissor(glState);
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
            {
                FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
                mPipelineDesc->updateViewport(framebufferVk, glState.getViewport(),
                                              glState.getNearPlane(), glState.getFarPlane(),
                                              isViewportFlipEnabledForDrawFBO());
                invalidateDriverUniforms();
                break;
            }
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                mPipelineDesc->updateDepthRange(glState.getNearPlane(), glState.getFarPlane());
                invalidateDriverUniforms();
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                updateColorMask(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState(),
                                                      glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                mPipelineDesc->updateDepthWriteEnabled(glState.getDepthStencilState(),
                                                       glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                mPipelineDesc->updateStencilTestEnabled(glState.getDepthStencilState(),
                                                        glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                mPipelineDesc->updateStencilFrontFuncs(glState.getStencilRef(),
                                                       glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                mPipelineDesc->updateStencilBackFuncs(glState.getStencilBackRef(),
                                                      glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                mPipelineDesc->updateStencilFrontOps(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                mPipelineDesc->updateStencilBackOps(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                mPipelineDesc->updateStencilFrontWriteMask(glState.getDepthStencilState(),
                                                           glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                mPipelineDesc->updateStencilBackWriteMask(glState.getDepthStencilState(),
                                                          glState.getDrawFramebuffer());
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState(),
                                               isViewportFlipEnabledForDrawFBO());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                mPipelineDesc->updatePolygonOffsetFillEnabled(glState.isPolygonOffsetFillEnabled());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                mPipelineDesc->updatePolygonOffset(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
Luc Ferronf9749ea2018-04-24 15:34:53 -0400787 // This is a no-op, its only important to use the right unpack state when we do
788 // setImage or setSubImage in TextureVk, which is plumbed through the frontend call
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                // This is a no-op; it's only important to use the right pack state when we call
                // readPixels later on.
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                updateFlipViewportReadFramebuffer(context->getGLState());
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
            {
                mDrawFramebuffer = vk::GetImpl(glState.getDrawFramebuffer());
                updateFlipViewportDrawFramebuffer(glState);
                mPipelineDesc->updateViewport(mDrawFramebuffer, glState.getViewport(),
                                              glState.getNearPlane(), glState.getFarPlane(),
                                              isViewportFlipEnabledForDrawFBO());
                updateColorMask(glState.getBlendState());
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                updateScissor(glState);
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState(),
                                                      glState.getDrawFramebuffer());
                mPipelineDesc->updateDepthWriteEnabled(glState.getDepthStencilState(),
                                                       glState.getDrawFramebuffer());
                mPipelineDesc->updateStencilTestEnabled(glState.getDepthStencilState(),
                                                        glState.getDrawFramebuffer());
                mPipelineDesc->updateStencilFrontWriteMask(glState.getDepthStencilState(),
                                                           glState.getDrawFramebuffer());
                mPipelineDesc->updateStencilBackWriteMask(glState.getDepthStencilState(),
                                                          glState.getDrawFramebuffer());
                invalidateDriverUniforms();
                break;
            }
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
            {
                mVertexArray = vk::GetImpl(glState.getVertexArray());
                invalidateDefaultAttributes(context->getStateCache().getActiveDefaultAttribsMask());
                break;
            }
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                mProgram = vk::GetImpl(glState.getProgram());
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                invalidateCurrentTextures();
                // No additional work is needed here. We will update the pipeline desc later.
                invalidateDefaultAttributes(context->getStateCache().getActiveDefaultAttribsMask());
                bool useVertexBuffer = (mProgram->getState().getMaxActiveAttribLocation() > 0);
                mNonIndexedDirtyBitsMask.set(DIRTY_BIT_VERTEX_BUFFERS, useVertexBuffer);
                mIndexedDirtyBitsMask.set(DIRTY_BIT_VERTEX_BUFFERS, useVertexBuffer);
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                invalidateCurrentTextures();
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                invalidateCurrentTextures();
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                break;
            case gl::State::DIRTY_BIT_ATOMIC_COUNTER_BUFFER_BINDING:
                break;
            case gl::State::DIRTY_BIT_IMAGE_BINDINGS:
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING:
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
            {
                invalidateDefaultAttributes(glState.getAndResetDirtyCurrentValues());
                break;
            }
            default:
                UNREACHABLE();
                break;
        }
    }

    return angle::Result::Continue();
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

gl::Error ContextVk::onMakeCurrent(const gl::Context *context)
{
    // Flip viewports if FeaturesVk::flipViewportY is enabled and the user did not request that
    // the surface be flipped.
    egl::Surface *drawSurface = context->getCurrentDrawSurface();
    mFlipYForCurrentSurface =
        drawSurface != nullptr && mRenderer->getFeatures().flipViewportY &&
        !IsMaskFlagSet(drawSurface->getOrientation(), EGL_SURFACE_ORIENTATION_INVERT_Y_ANGLE);

    const gl::State &glState = context->getGLState();
    updateFlipViewportDrawFramebuffer(glState);
    updateFlipViewportReadFramebuffer(glState);
    invalidateDriverUniforms();
    return gl::NoError();
}

void ContextVk::updateFlipViewportDrawFramebuffer(const gl::State &glState)
{
    gl::Framebuffer *drawFramebuffer = glState.getDrawFramebuffer();
    mFlipViewportForDrawFramebuffer =
        drawFramebuffer->isDefault() && mRenderer->getFeatures().flipViewportY;
}

void ContextVk::updateFlipViewportReadFramebuffer(const gl::State &glState)
{
    gl::Framebuffer *readFramebuffer = glState.getReadFramebuffer();
    mFlipViewportForReadFramebuffer =
        readFramebuffer->isDefault() && mRenderer->getFeatures().flipViewportY;
}

gl::Caps ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(mRenderer, state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state, mRenderer);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state, mRenderer);
}

QueryImpl *ContextVk::createQuery(gl::QueryType type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

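// The following invalidate* helpers only record dirty bits; the corresponding work is deferred to
// the dirty bit handlers that run in setupDraw() on the next draw call.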
void ContextVk::invalidateCurrentPipeline()
{
    mDirtyBits.set(DIRTY_BIT_PIPELINE);
    mDirtyBits.set(DIRTY_BIT_VERTEX_BUFFERS);
    mDirtyBits.set(DIRTY_BIT_INDEX_BUFFER);
    mCurrentPipeline = nullptr;
}

void ContextVk::invalidateCurrentTextures()
{
    ASSERT(mProgram);
    if (mProgram->hasTextures())
    {
        mDirtyBits.set(DIRTY_BIT_TEXTURES);
        mDirtyBits.set(DIRTY_BIT_DESCRIPTOR_SETS);
    }
}

void ContextVk::invalidateDriverUniforms()
{
    mDirtyBits.set(DIRTY_BIT_DRIVER_UNIFORMS);
    mDirtyBits.set(DIRTY_BIT_DESCRIPTOR_SETS);
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DynamicDescriptorPool *ContextVk::getDynamicDescriptorPool(uint32_t descriptorSetIndex)
{
    return &mDynamicDescriptorPools[descriptorSetIndex];
}

vk::DynamicQueryPool *ContextVk::getQueryPool(gl::QueryType queryType)
{
    ASSERT(queryType == gl::QueryType::AnySamples ||
           queryType == gl::QueryType::AnySamplesConservative);
    ASSERT(mQueryPools[queryType].isValid());
    return &mQueryPools[queryType];
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

VkColorComponentFlags ContextVk::getClearColorMask() const
{
    return mClearColorMask;
}

const FeaturesVk &ContextVk::getFeatures() const
{
    return mRenderer->getFeatures();
}
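// handleDirtyDriverUniforms streams a small block of ANGLE-internal uniforms (viewport, Y-flip
// scale, and depth range) into a dynamic buffer and binds it through a dedicated descriptor set.
// The initializer below implies a layout roughly like the sketch that follows; the real
// definition lives in ContextVk.h, so treat this as illustrative only:
//
//   struct DriverUniforms
//   {
//       float viewport[4];           // x, y, width, height
//       float halfRenderAreaHeight;
//       float viewportYScale;        // +1, or -1 when the viewport is flipped for the draw FBO
//       float negViewportYScale;
//       float padding;
//       float depthRange[4];         // near, far, far - near, unused
//   };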
angle::Result ContextVk::handleDirtyDriverUniforms(const gl::Context *context,
                                                   const gl::DrawCallParams &drawCallParams,
                                                   vk::CommandBuffer *commandBuffer)
{
    // Release any previously retained buffers.
    mDriverUniformsBuffer.releaseRetainedBuffers(mRenderer);

    const gl::Rectangle &glViewport = mState.getState().getViewport();
    float halfRenderAreaHeight =
        static_cast<float>(mDrawFramebuffer->getState().getDimensions().height) * 0.5f;

    // Allocate a new region in the dynamic buffer.
    uint8_t *ptr            = nullptr;
    VkBuffer buffer         = VK_NULL_HANDLE;
    VkDeviceSize offset     = 0;
    bool newBufferAllocated = false;
    ANGLE_TRY(mDriverUniformsBuffer.allocate(this, sizeof(DriverUniforms), &ptr, &buffer, &offset,
                                             &newBufferAllocated));
    float scaleY = isViewportFlipEnabledForDrawFBO() ? -1.0f : 1.0f;

    float depthRangeNear = mState.getState().getNearPlane();
    float depthRangeFar  = mState.getState().getFarPlane();
    float depthRangeDiff = depthRangeFar - depthRangeNear;

    // Copy and flush to the device.
    DriverUniforms *driverUniforms = reinterpret_cast<DriverUniforms *>(ptr);
    *driverUniforms                = {
        {static_cast<float>(glViewport.x), static_cast<float>(glViewport.y),
         static_cast<float>(glViewport.width), static_cast<float>(glViewport.height)},
        halfRenderAreaHeight,
        scaleY,
        -scaleY,
        0.0f,
        {depthRangeNear, depthRangeFar, depthRangeDiff, 0.0f}};

    ANGLE_TRY(mDriverUniformsBuffer.flush(this));

    // Get the descriptor set layout.
    if (!mDriverUniformsSetLayout.valid())
    {
        vk::DescriptorSetLayoutDesc desc;
        desc.update(0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1);

        ANGLE_TRY(mRenderer->getDescriptorSetLayout(this, desc, &mDriverUniformsSetLayout));
    }

    // Allocate a new descriptor set.
    ANGLE_TRY(mDynamicDescriptorPools[kDriverUniformsDescriptorSetIndex].allocateSets(
        this, mDriverUniformsSetLayout.get().ptr(), 1, &mDriverUniformsDescriptorPoolBinding,
        &mDriverUniformsDescriptorSet));

    // Update the driver uniform descriptor set.
    VkDescriptorBufferInfo bufferInfo = {};
    bufferInfo.buffer                 = buffer;
    bufferInfo.offset                 = offset;
    bufferInfo.range                  = sizeof(DriverUniforms);

    VkWriteDescriptorSet writeInfo = {};
    writeInfo.sType                = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writeInfo.dstSet               = mDriverUniformsDescriptorSet;
    writeInfo.dstBinding           = 0;
    writeInfo.dstArrayElement      = 0;
    writeInfo.descriptorCount      = 1;
    writeInfo.descriptorType       = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    writeInfo.pImageInfo           = nullptr;
    writeInfo.pTexelBufferView     = nullptr;
    writeInfo.pBufferInfo          = &bufferInfo;

    vkUpdateDescriptorSets(getDevice(), 1, &writeInfo, 0, nullptr);

    return angle::Result::Continue();
}
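// handleError converts a failed VkResult into the closest matching GL error (see
// DefaultGLErrorCode) and records it on the front-end error set; device loss is additionally
// reported to the renderer via markDeviceLost().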
void ContextVk::handleError(VkResult errorCode, const char *file, unsigned int line)
{
    GLenum glErrorCode = DefaultGLErrorCode(errorCode);

    std::stringstream errorStream;
    errorStream << "Internal Vulkan error: " << VulkanResultString(errorCode) << ", in " << file
                << ", line " << line << ".";

    if (errorCode == VK_ERROR_DEVICE_LOST)
    {
        mRenderer->markDeviceLost();
    }

    mErrors->handleError(gl::Error(glErrorCode, glErrorCode, errorStream.str()));
}
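// updateActiveTextures gathers the TextureVk pointer for every sampler the current program uses,
// substituting an ANGLE-provided incomplete texture wherever the front-end texture cache holds a
// null entry.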
angle::Result ContextVk::updateActiveTextures(const gl::Context *context)
{
    const gl::State &glState   = mState.getState();
    const gl::Program *program = glState.getProgram();

    mActiveTextures.fill(nullptr);

    const gl::ActiveTexturePointerArray &textures  = glState.getActiveTexturesCache();
    const gl::ActiveTextureMask &activeTextures    = program->getActiveSamplersMask();
    const gl::ActiveTextureTypeArray &textureTypes = program->getActiveSamplerTypes();

    for (size_t textureUnit : activeTextures)
    {
        gl::Texture *texture        = textures[textureUnit];
        gl::TextureType textureType = textureTypes[textureUnit];

        // Null textures represent incomplete textures.
        if (texture == nullptr)
        {
            ANGLE_TRY_HANDLE(context, getIncompleteTexture(context, textureType, &texture));
        }

        mActiveTextures[textureUnit] = vk::GetImpl(texture);
    }

    return angle::Result::Continue();
}

const gl::ActiveTextureArray<TextureVk *> &ContextVk::getActiveTextures() const
{
    return mActiveTextures;
}
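// "Default" attributes are the current vertex attribute values used when no vertex array is
// enabled for an attribute.  They are streamed through small per-attribute dynamic buffers, so
// invalidation only marks the attribute dirty here; the data is uploaded lazily in
// updateDefaultAttribute.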
void ContextVk::invalidateDefaultAttribute(size_t attribIndex)
{
    mDirtyDefaultAttribsMask.set(attribIndex);
    mDirtyBits.set(DIRTY_BIT_DEFAULT_ATTRIBS);
}

void ContextVk::invalidateDefaultAttributes(const gl::AttributesMask &dirtyMask)
{
    if (dirtyMask.any())
    {
        // Accumulate with any attributes already marked dirty so earlier invalidations from
        // invalidateDefaultAttribute are not lost before the next draw.
        mDirtyDefaultAttribsMask |= dirtyMask;
        mDirtyBits.set(DIRTY_BIT_DEFAULT_ATTRIBS);
    }
}
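// A minimal upload path: only float-typed current values are handled here (see the ASSERT below),
// and kDefaultValueSize is presumably sizeof(float) * 4, matching the four components of
// gl::VertexAttribCurrentValueData::FloatValues; the constant itself is defined earlier in this
// file.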
angle::Result ContextVk::updateDefaultAttribute(size_t attribIndex)
{
    vk::DynamicBuffer &defaultBuffer = mDefaultAttribBuffers[attribIndex];

    defaultBuffer.releaseRetainedBuffers(mRenderer);

    uint8_t *ptr          = nullptr;
    VkBuffer bufferHandle = VK_NULL_HANDLE;
    VkDeviceSize offset   = 0;
    ANGLE_TRY(
        defaultBuffer.allocate(this, kDefaultValueSize, &ptr, &bufferHandle, &offset, nullptr));

    const gl::State &glState = mState.getState();
    const gl::VertexAttribCurrentValueData &defaultValue =
        glState.getVertexAttribCurrentValues()[attribIndex];

    ASSERT(defaultValue.Type == GL_FLOAT);

    memcpy(ptr, defaultValue.FloatValues, kDefaultValueSize);

    ANGLE_TRY(defaultBuffer.flush(this));

    mVertexArray->updateDefaultAttrib(mRenderer, attribIndex, bufferHandle,
                                      static_cast<uint32_t>(offset));
    return angle::Result::Continue();
}
}  // namespace rx