//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/Surface.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"

namespace rx
{

namespace
{
constexpr gl::Rectangle kMaxSizedScissor(0,
                                         0,
                                         std::numeric_limits<int>::max(),
                                         std::numeric_limits<int>::max());

constexpr VkColorComponentFlags kAllColorChannelsMask =
    (VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT |
     VK_COLOR_COMPONENT_A_BIT);
}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(gl::PrimitiveMode::InvalidEnum),
      mTexturesDirty(false),
      mVertexArrayBindingHasChanged(false),
      mClearColorMask(kAllColorChannelsMask),
      mFlipYForCurrentSurface(false),
      mDriverUniformsBuffer(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, sizeof(DriverUniforms) * 16),
      mDriverUniformsDescriptorSet(VK_NULL_HANDLE)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

void ContextVk::onDestroy(const gl::Context *context)
{
    mDriverUniformsSetLayout.reset();
    mIncompleteTextures.onDestroy(context);
    mDriverUniformsBuffer.destroy(getDevice());

    for (vk::DynamicDescriptorPool &descriptorPool : mDynamicDescriptorPools)
    {
        descriptorPool.destroy(getDevice());
    }
}

gl::Error ContextVk::getIncompleteTexture(const gl::Context *context,
                                          gl::TextureType type,
                                          gl::Texture **textureOut)
{
    // At some point, we'll need to support multisample and we'll pass "this" instead of nullptr
    // and implement the necessary interface.
    return mIncompleteTextures.getIncompleteTexture(context, type, nullptr, textureOut);
}

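// Creates one dynamic descriptor pool per descriptor set index (uniform buffers, image samplers
// and driver uniforms) and initializes the default pipeline description.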
gl::Error ContextVk::initialize()
{
    // Note that this may reserve more sets than strictly necessary for a particular layout.
    VkDescriptorPoolSize uniformPoolSize = {
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
        GetUniformBufferDescriptorCount() * vk::kDefaultDescriptorPoolMaxSets};

    ANGLE_TRY(
        mDynamicDescriptorPools[kUniformsDescriptorSetIndex].init(getDevice(), uniformPoolSize));

    VkDescriptorPoolSize imageSamplerPoolSize = {
        VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        mRenderer->getMaxActiveTextures() * vk::kDefaultDescriptorPoolMaxSets};
    ANGLE_TRY(mDynamicDescriptorPools[kTextureDescriptorSetIndex].init(getDevice(),
                                                                       imageSamplerPoolSize));

    VkDescriptorPoolSize driverUniformsPoolSize = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                                                   vk::kDefaultDescriptorPoolMaxSets};
    ANGLE_TRY(mDynamicDescriptorPools[kDriverUniformsDescriptorSetIndex].init(
        getDevice(), driverUniformsPoolSize));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();

    // dEQP tests rely on having no errors thrown at the end of the test and they always call
    // flush at the end of their tests. Just returning NoError until we implement flush allows
    // us to work on enabling many tests in the meantime.
    return gl::NoError();
}
127
Jamie Madillafa02a22017-11-23 12:57:38 -0500128gl::Error ContextVk::finish(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400129{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500130 return mRenderer->finish(context);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400131}
132
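// Fills in the pipeline description from the current GL state (topology, vertex input, render
// pass) and asks the renderer for a matching graphics pipeline.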
gl::Error ContextVk::initPipeline()
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mRenderer, mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc());

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getAppPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                        &mCurrentPipeline));

    return gl::NoError();
}

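// Common per-draw setup: ensures a compatible pipeline exists, obtains the command buffer from
// the draw framebuffer, flushes pending texture writes, then binds the pipeline and the
// program's descriptor sets.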
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               const gl::DrawCallParams &drawCallParams,
                               vk::CommandBuffer **commandBufferOut,
                               bool *shouldApplyVertexArrayOut)
{
    if (drawCallParams.mode() != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = drawCallParams.mode();
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline());
    }

    const auto &state = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk = vk::GetImpl(programGL);
    const gl::Framebuffer *framebuffer = state.getDrawFramebuffer();
    FramebufferVk *framebufferVk = vk::GetImpl(framebuffer);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();

    vk::RecordingMode mode = vk::RecordingMode::Start;
    ANGLE_TRY(framebufferVk->getCommandBufferForDraw(this, commandBufferOut, &mode));

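    // A freshly started command buffer recording has no bindings from previous draws, so mark
    // the textures dirty and request that the vertex array state be re-applied.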
    if (mode == vk::RecordingMode::Start)
    {
        mTexturesDirty = true;
        *shouldApplyVertexArrayOut = true;
    }
    else
    {
        *shouldApplyVertexArrayOut = mVertexArrayBindingHasChanged;
        mVertexArrayBindingHasChanged = false;
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        ContextVk *contextVk = vk::GetImpl(context);
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            for (GLuint textureUnit : samplerBinding.boundTextureUnits)
            {
                gl::Texture *texture = completeTextures[textureUnit];

                // Null textures represent incomplete textures.
                if (texture == nullptr)
                {
                    ANGLE_TRY(getIncompleteTexture(context, samplerBinding.textureType, &texture));
                }

                TextureVk *textureVk = vk::GetImpl(texture);
                ANGLE_TRY(textureVk->ensureImageInitialized(contextVk));
                textureVk->addReadDependency(framebufferVk);
            }
        }
    }

    (*commandBufferOut)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    ANGLE_TRY(programVk->updateTexturesDescriptorSet(context));

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = programVk->getPipelineLayout();

        (*commandBufferOut)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()],
                                 programVk->getDynamicOffsetsCount(),
                                 programVk->getDynamicOffsets());
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArrays(const gl::Context *context,
                                gl::PrimitiveMode mode,
                                GLint first,
                                GLsizei count)
{
    const gl::DrawCallParams &drawCallParams = context->getParams<gl::DrawCallParams>();

    vk::CommandBuffer *commandBuffer = nullptr;
    bool shouldApplyVertexArray = false;
    ANGLE_TRY(setupDraw(context, drawCallParams, &commandBuffer, &shouldApplyVertexArray));

    const gl::VertexArray *vertexArray = context->getGLState().getVertexArray();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(vertexArray);
    ANGLE_TRY(vertexArrayVk->drawArrays(context, mRenderer, drawCallParams, commandBuffer,
                                        shouldApplyVertexArray));

    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         gl::PrimitiveMode mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawElements(const gl::Context *context,
                                  gl::PrimitiveMode mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    const gl::DrawCallParams &drawCallParams = context->getParams<gl::DrawCallParams>();

    vk::CommandBuffer *commandBuffer = nullptr;
    bool shouldApplyVertexArray = false;
    ANGLE_TRY(setupDraw(context, drawCallParams, &commandBuffer, &shouldApplyVertexArray));

    gl::VertexArray *vao = mState.getState().getVertexArray();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(vao);
    ANGLE_TRY(vertexArrayVk->drawElements(context, mRenderer, drawCallParams, commandBuffer,
                                          shouldApplyVertexArray));

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           gl::PrimitiveMode mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       gl::PrimitiveMode mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        gl::PrimitiveMode mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          gl::PrimitiveMode mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

bool ContextVk::isViewportFlipEnabledForDrawFBO() const
{
    return mFlipViewportForDrawFramebuffer && mFlipYForCurrentSurface;
}

bool ContextVk::isViewportFlipEnabledForReadFBO() const
{
    return mFlipViewportForReadFramebuffer;
}

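// Recomputes the cached color write mask from the blend state and propagates it to the pipeline
// description, taking attachments with an emulated alpha channel into account.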
void ContextVk::updateColorMask(const gl::BlendState &blendState)
{
    mClearColorMask =
        gl_vk::GetColorComponentFlags(blendState.colorMaskRed, blendState.colorMaskGreen,
                                      blendState.colorMaskBlue, blendState.colorMaskAlpha);

    FramebufferVk *framebufferVk = vk::GetImpl(mState.getState().getDrawFramebuffer());
    mPipelineDesc->updateColorWriteMask(mClearColorMask,
                                        framebufferVk->getEmulatedAlphaAttachmentMask());
}

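// Updates the scissor in the pipeline description, passing along the framebuffer's render area
// and whether viewport flipping applies to the draw framebuffer.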
void ContextVk::updateScissor(const gl::State &glState)
{
    FramebufferVk *framebufferVk = vk::GetImpl(getGLState().getDrawFramebuffer());
    gl::Box dimensions = framebufferVk->getState().getDimensions();
    gl::Rectangle renderArea(0, 0, dimensions.width, dimensions.height);

    if (glState.isScissorTestEnabled())
    {
        mPipelineDesc->updateScissor(glState.getScissor(), isViewportFlipEnabledForDrawFBO(),
                                     renderArea);
    }
    else
    {
        // If the scissor test isn't enabled, use a scissor that is certainly larger than the
        // current surface: kMaxSizedScissor spans the maximum integer extent in both dimensions.
        mPipelineDesc->updateScissor(kMaxSizedScissor, isViewportFlipEnabledForDrawFBO(),
                                     renderArea);
    }
}

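// Translates GL state dirty bits into pipeline description updates and cached clear values. Any
// dirty bit also invalidates the current pipeline so it gets rebuilt on the next draw.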
gl::Error ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
            case gl::State::DIRTY_BIT_SCISSOR:
                updateScissor(glState);
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
            {
                FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
                mPipelineDesc->updateViewport(framebufferVk, glState.getViewport(),
                                              glState.getNearPlane(), glState.getFarPlane(),
                                              isViewportFlipEnabledForDrawFBO());
                ANGLE_TRY(updateDriverUniforms());
                break;
            }
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                mPipelineDesc->updateDepthRange(glState.getNearPlane(), glState.getFarPlane());
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                updateColorMask(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                mPipelineDesc->updateDepthWriteEnabled(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                mPipelineDesc->updateStencilTestEnabled(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                mPipelineDesc->updateStencilFrontFuncs(glState.getStencilRef(),
                                                       glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                mPipelineDesc->updateStencilBackFuncs(glState.getStencilBackRef(),
                                                      glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                mPipelineDesc->updateStencilFrontOps(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                mPipelineDesc->updateStencilBackOps(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                mPipelineDesc->updateStencilFrontWriteMask(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                mPipelineDesc->updateStencilBackWriteMask(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
            {
                mPipelineDesc->updateCullMode(glState.getRasterizerState(),
                                              isViewportFlipEnabledForDrawFBO());
                break;
            }
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                // This is a no-op; it's only important to use the right unpack state when we do
                // setImage or setSubImage in TextureVk, which is plumbed through the frontend call.
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                // This is a no-op; it's only important to use the right pack state when we
                // call readPixels later on.
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                updateFlipViewportReadFramebuffer(context->getGLState());
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
            {
                updateFlipViewportDrawFramebuffer(context->getGLState());
                FramebufferVk *framebufferVk = vk::GetImpl(glState.getDrawFramebuffer());
                mPipelineDesc->updateViewport(framebufferVk, glState.getViewport(),
                                              glState.getNearPlane(), glState.getFarPlane(),
                                              isViewportFlipEnabledForDrawFBO());
                updateColorMask(glState.getBlendState());
                mPipelineDesc->updateCullMode(glState.getRasterizerState(),
                                              isViewportFlipEnabledForDrawFBO());
                updateScissor(glState);
                break;
            }
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                invalidateCurrentPipeline();
                mVertexArrayBindingHasChanged = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk->getVertexModuleSerial(),
                                             programVk->getFragmentModuleSerial());
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }

    return gl::NoError();
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context *context)
{
    // Flip viewports if FeaturesVk::flipViewportY is enabled and the user did not request that the
    // surface is flipped.
    egl::Surface *drawSurface = context->getCurrentDrawSurface();
    mFlipYForCurrentSurface =
        drawSurface != nullptr && mRenderer->getFeatures().flipViewportY &&
        !IsMaskFlagSet(drawSurface->getOrientation(), EGL_SURFACE_ORIENTATION_INVERT_Y_ANGLE);

    const gl::State &glState = context->getGLState();
    updateFlipViewportDrawFramebuffer(glState);
    updateFlipViewportReadFramebuffer(glState);
}

void ContextVk::updateFlipViewportDrawFramebuffer(const gl::State &glState)
{
    gl::Framebuffer *drawFramebuffer = glState.getDrawFramebuffer();
    mFlipViewportForDrawFramebuffer =
        drawFramebuffer->isDefault() && mRenderer->getFeatures().flipViewportY;
}

void ContextVk::updateFlipViewportReadFramebuffer(const gl::State &glState)
{
    gl::Framebuffer *readFramebuffer = glState.getReadFramebuffer();
    mFlipViewportForReadFramebuffer =
        readFramebuffer->isDefault() && mRenderer->getFeatures().flipViewportY;
}

gl::Caps ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state, mRenderer);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state, mRenderer);
}

QueryImpl *ContextVk::createQuery(gl::QueryType type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DynamicDescriptorPool *ContextVk::getDynamicDescriptorPool(uint32_t descriptorSetIndex)
{
    return &mDynamicDescriptorPools[descriptorSetIndex];
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

VkColorComponentFlags ContextVk::getClearColorMask() const
{
    return mClearColorMask;
}

const FeaturesVk &ContextVk::getFeatures() const
{
    return mRenderer->getFeatures();
}

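// Writes the current GL viewport into a newly allocated region of the driver uniforms dynamic
// buffer and points the driver uniforms descriptor set at that region.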
vk::Error ContextVk::updateDriverUniforms()
{
    if (!mDriverUniformsBuffer.valid())
    {
        size_t minAlignment = static_cast<size_t>(
            mRenderer->getPhysicalDeviceProperties().limits.minUniformBufferOffsetAlignment);
        mDriverUniformsBuffer.init(minAlignment, mRenderer);
    }

    // Release any previously retained buffers.
    mDriverUniformsBuffer.releaseRetainedBuffers(mRenderer);

    const gl::Rectangle &glViewport = mState.getState().getViewport();

    // Allocate a new region in the dynamic buffer.
    uint8_t *ptr = nullptr;
    VkBuffer buffer = VK_NULL_HANDLE;
    uint32_t offset = 0;
    bool newBufferAllocated = false;
    ANGLE_TRY(mDriverUniformsBuffer.allocate(mRenderer, sizeof(DriverUniforms), &ptr, &buffer,
                                             &offset, &newBufferAllocated));

    // Copy and flush to the device.
    DriverUniforms *driverUniforms = reinterpret_cast<DriverUniforms *>(ptr);
    driverUniforms->viewport[0] = static_cast<float>(glViewport.x);
    driverUniforms->viewport[1] = static_cast<float>(glViewport.y);
    driverUniforms->viewport[2] = static_cast<float>(glViewport.width);
    driverUniforms->viewport[3] = static_cast<float>(glViewport.height);

    ANGLE_TRY(mDriverUniformsBuffer.flush(getDevice()));

    // Get the descriptor set layout.
    if (!mDriverUniformsSetLayout.valid())
    {
        vk::DescriptorSetLayoutDesc desc;
        desc.update(0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1);

        ANGLE_TRY(mRenderer->getDescriptorSetLayout(desc, &mDriverUniformsSetLayout));
    }

    // Allocate a new descriptor set.
    ANGLE_TRY(mDynamicDescriptorPools[kDriverUniformsDescriptorSetIndex].allocateSets(
        this, mDriverUniformsSetLayout.get().ptr(), 1, &mDriverUniformsDescriptorSet));

    // Update the driver uniform descriptor set.
    VkDescriptorBufferInfo bufferInfo;
    bufferInfo.buffer = buffer;
    bufferInfo.offset = offset;
    bufferInfo.range = sizeof(DriverUniforms);

    VkWriteDescriptorSet writeInfo;
    writeInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writeInfo.pNext = nullptr;
    writeInfo.dstSet = mDriverUniformsDescriptorSet;
    writeInfo.dstBinding = 0;
    writeInfo.dstArrayElement = 0;
    writeInfo.descriptorCount = 1;
    writeInfo.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    writeInfo.pImageInfo = nullptr;
    writeInfo.pTexelBufferView = nullptr;
    writeInfo.pBufferInfo = &bufferInfo;

    vkUpdateDescriptorSets(getDevice(), 1, &writeInfo, 0, nullptr);

    return vk::NoError();
}
}  // namespace rx