blob: 621720bc8c5d8d2a3c462136438b689616f16b93 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ContextVk.cpp:
7// Implements the class methods for ContextVk.
8//
9
10#include "libANGLE/renderer/vulkan/ContextVk.h"
11
Jamie Madill20e005b2017-04-07 14:19:22 -040012#include "common/bitset_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040013#include "common/debug.h"
Frank Henigmana53d0e12018-02-13 00:06:06 -050014#include "common/utilities.h"
Jamie Madillbd159f02017-10-09 19:39:06 -040015#include "libANGLE/Context.h"
Jamie Madilldf68a6f2017-01-13 17:29:53 -050016#include "libANGLE/Program.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040017#include "libANGLE/renderer/vulkan/BufferVk.h"
Jamie Madill1f46bc12018-02-20 16:09:43 -050018#include "libANGLE/renderer/vulkan/CommandGraph.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040019#include "libANGLE/renderer/vulkan/CompilerVk.h"
20#include "libANGLE/renderer/vulkan/ContextVk.h"
21#include "libANGLE/renderer/vulkan/DeviceVk.h"
Luc Ferrondaedf4d2018-03-16 09:28:53 -040022#include "libANGLE/renderer/vulkan/DynamicDescriptorPool.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040023#include "libANGLE/renderer/vulkan/FenceNVVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040024#include "libANGLE/renderer/vulkan/FramebufferVk.h"
25#include "libANGLE/renderer/vulkan/ImageVk.h"
Yunchao Hea336b902017-08-02 16:05:21 +080026#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040027#include "libANGLE/renderer/vulkan/ProgramVk.h"
28#include "libANGLE/renderer/vulkan/QueryVk.h"
29#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
30#include "libANGLE/renderer/vulkan/RendererVk.h"
31#include "libANGLE/renderer/vulkan/SamplerVk.h"
32#include "libANGLE/renderer/vulkan/ShaderVk.h"
Jamie Madill70b5bb02017-08-28 13:32:37 -040033#include "libANGLE/renderer/vulkan/SyncVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040034#include "libANGLE/renderer/vulkan/TextureVk.h"
35#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
36#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050037#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040038
39namespace rx
40{
41
Jamie Madilld03a8492017-10-03 15:46:06 -040042namespace
43{
44
45VkIndexType GetVkIndexType(GLenum glIndexType)
46{
47 switch (glIndexType)
48 {
Luc Ferron80964f92018-03-08 10:31:24 -050049 case GL_UNSIGNED_BYTE:
Jamie Madilld03a8492017-10-03 15:46:06 -040050 case GL_UNSIGNED_SHORT:
51 return VK_INDEX_TYPE_UINT16;
52 case GL_UNSIGNED_INT:
53 return VK_INDEX_TYPE_UINT32;
54 default:
55 UNREACHABLE();
56 return VK_INDEX_TYPE_MAX_ENUM;
57 }
58}
59
Frank Henigmana53d0e12018-02-13 00:06:06 -050060constexpr size_t kStreamingVertexDataSize = 1024 * 1024;
61constexpr size_t kStreamingIndexDataSize = 1024 * 8;
62
Jamie Madilld03a8492017-10-03 15:46:06 -040063} // anonymous namespace
64
// ContextVk implements the rx::ContextImpl interface on top of RendererVk.
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mDynamicDescriptorPool(),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, kStreamingVertexDataSize),
      mStreamingIndexData(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, kStreamingIndexDataSize)
{
    // Zero the clear values; syncState() overwrites them from the GL clear state.
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
    // NOTE(review): init(1) presumably sets an initial buffer count/alignment for the
    // streaming buffers — confirm against the DynamicBuffer implementation.
    mStreamingVertexData.init(1);
    mStreamingIndexData.init(1);
}
80
// Vulkan resources are released in onDestroy(), which receives the gl::Context;
// the destructor itself has nothing to clean up.
ContextVk::~ContextVk()
{
}
84
Jamie Madill76e471e2017-10-21 09:56:01 -040085void ContextVk::onDestroy(const gl::Context *context)
86{
87 VkDevice device = mRenderer->getDevice();
88
Luc Ferrondaedf4d2018-03-16 09:28:53 -040089 mDynamicDescriptorPool.destroy(mRenderer);
Frank Henigman17448952017-01-05 15:48:26 -050090 mStreamingVertexData.destroy(device);
Frank Henigmana53d0e12018-02-13 00:06:06 -050091 mStreamingIndexData.destroy(device);
Luc Ferron360098d2018-02-21 07:33:50 -050092 mLineLoopHandler.destroy(device);
Jamie Madill76e471e2017-10-21 09:56:01 -040093}
94
Jamie Madill9e54b5a2016-05-25 12:57:39 -040095gl::Error ContextVk::initialize()
96{
Luc Ferrondaedf4d2018-03-16 09:28:53 -040097 ANGLE_TRY(mDynamicDescriptorPool.init(this->getDevice(),
98 mRenderer->getUniformBufferDescriptorCount(),
99 mRenderer->getMaxActiveTextures()));
Jamie Madill76e471e2017-10-21 09:56:01 -0400100
Jamie Madillf2f6d372018-01-10 21:37:23 -0500101 mPipelineDesc.reset(new vk::PipelineDesc());
102 mPipelineDesc->initDefaults();
103
Jamie Madille09bd5d2016-11-29 16:20:35 -0500104 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400105}
106
gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();

    // dEQP tests rely on having no errors thrown at the end of the test and they always call
    // flush at the end of their tests. Returning NoError until flush is implemented allows
    // us to keep enabling tests in the meantime.
    return gl::NoError();
}
117
// Blocks until the renderer has completed all submitted work for this context.
gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}
122
Jamie Madill4928b7c2017-06-20 12:57:39 -0400123gl::Error ContextVk::initPipeline(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400124{
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500125 ASSERT(!mCurrentPipeline);
Jamie Madill72106562017-03-24 14:18:50 -0400126
Jamie Madillf2f6d372018-01-10 21:37:23 -0500127 const gl::State &state = mState.getState();
128 VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
129 FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
130 ProgramVk *programVk = vk::GetImpl(state.getProgram());
Luc Ferronceb71902018-02-05 15:18:47 -0500131 const gl::AttributesMask activeAttribLocationsMask =
132 state.getProgram()->getActiveAttribLocationsMask();
Jamie Madillf2f6d372018-01-10 21:37:23 -0500133
134 // Ensure the topology of the pipeline description is updated.
135 mPipelineDesc->updateTopology(mCurrentDrawMode);
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500136
Jamie Madill112a3a82018-01-23 13:04:06 -0500137 // Copy over the latest attrib and binding descriptions.
138 vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500139
Jamie Madillf2f6d372018-01-10 21:37:23 -0500140 // Ensure that the RenderPass description is updated.
141 mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500142
143 // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
Luc Ferronceb71902018-02-05 15:18:47 -0500144 ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
145 &mCurrentPipeline));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500146
Jamie Madill72106562017-03-24 14:18:50 -0400147 return gl::NoError();
148}
149
// Common preamble for every draw call: makes sure a compatible pipeline exists,
// obtains (or begins) the render-pass command buffer for the draw framebuffer,
// flushes VAO/texture dependencies into the command graph, streams client-side
// vertex data, and binds pipeline, vertex buffers and descriptor sets.
//
// firstVertex/lastVertex bound the vertex range read by the draw (used when
// streaming client-side attribute data).  elementArrayBufferOverride, when
// non-null, replaces the VAO's element array buffer as a draw dependency
// (used by the line-loop emulation path).  On success *commandBuffer points
// at the command buffer the caller should record the draw into.
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               size_t firstVertex,
                               size_t lastVertex,
                               ResourceVk *elementArrayBufferOverride,
                               vk::CommandBuffer **commandBuffer)
{
    // A new primitive mode changes the pipeline topology, so the cached
    // pipeline must be rebuilt.
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *graphNode = nullptr;
    ANGLE_TRY(vkFBO->getCommandGraphNodeForDraw(context, &graphNode));

    // Starting a fresh render pass invalidates previously recorded bindings,
    // so force the VAO and texture dependencies to be re-established.
    if (!graphNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(graphNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = graphNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty || elementArrayBufferOverride != nullptr)
    {

        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(graphNode, programGL->getActiveAttribLocationsMask(),
                                      elementArrayBufferOverride, queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(graphNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());

    // Stream any client-side attribute data for [firstVertex, lastVertex] and
    // bind all vertex buffers (one handle/offset pair per active attribute).
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();

        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()],
                                 programVk->getDynamicOffsetsCount(),
                                 programVk->getDynamicOffsets());
    }

    return gl::NoError();
}
259
Jamie Madilld03a8492017-10-03 15:46:06 -0400260gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
261{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500262 vk::CommandBuffer *commandBuffer = nullptr;
Luc Ferron78e39b32018-02-26 07:42:44 -0500263 ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, nullptr,
264 &commandBuffer));
Luc Ferron360098d2018-02-21 07:33:50 -0500265
266 if (mode == GL_LINE_LOOP)
267 {
Luc Ferron78e39b32018-02-26 07:42:44 -0500268 ANGLE_TRY(mLineLoopHandler.createIndexBuffer(this, first, count));
269 mLineLoopHandler.bindIndexBuffer(VK_INDEX_TYPE_UINT32, &commandBuffer);
270 ANGLE_TRY(mLineLoopHandler.draw(count, commandBuffer));
Luc Ferron360098d2018-02-21 07:33:50 -0500271 }
272 else
273 {
274 commandBuffer->draw(count, 1, first, 0);
275 }
276
Jamie Madilld03a8492017-10-03 15:46:06 -0400277 return gl::NoError();
278}
279
// Instanced, non-indexed draws are not implemented yet in the Vulkan backend.
gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
289
// Records an indexed draw. Handles three sources of index data:
//  - GL_LINE_LOOP with a bound element array buffer (emulated via the line-loop
//    helper; client-memory line-loop indices are not supported yet),
//  - indices in a bound element array buffer (bound directly; GL_UNSIGNED_BYTE
//    translation is not implemented for this path),
//  - client-memory indices, which are streamed into mStreamingIndexData
//    (GL_UNSIGNED_BYTE is expanded to 16-bit, since Vulkan has no 8-bit index type).
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    gl::VertexArray *vao                 = mState.getState().getVertexArray();
    const gl::Buffer *elementArrayBuffer = vao->getElementArrayBuffer().get();
    vk::CommandBuffer *commandBuffer     = nullptr;

    if (mode == GL_LINE_LOOP)
    {
        if (!elementArrayBuffer)
        {
            UNIMPLEMENTED();
            return gl::InternalError() << "Line loop indices in client memory not supported";
        }

        BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

        ANGLE_TRY(mLineLoopHandler.createIndexBufferFromElementArrayBuffer(
            this, elementArrayBufferVk, GetVkIndexType(type), count));

        // TODO(fjhenigman): calculate the index range and pass to setupDraw()
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0,
                            mLineLoopHandler.getLineLoopBufferResource(), &commandBuffer));

        mLineLoopHandler.bindIndexBuffer(GetVkIndexType(type), &commandBuffer);
        // count + 1: one extra index closes the loop back to the first vertex.
        commandBuffer->drawIndexed(count + 1, 1, 0, 0, 0);
    }
    else
    {
        ContextVk *contextVk = vk::GetImpl(context);
        // The vertex range is only needed when client-side attributes must be
        // streamed; skip the (potentially expensive) index scan otherwise.
        const bool computeIndexRange = vk::GetImpl(vao)->attribsToStream(contextVk).any();
        gl::IndexRange range;
        VkBuffer buffer = VK_NULL_HANDLE;
        uint32_t offset = 0;

        if (elementArrayBuffer)
        {
            if (type == GL_UNSIGNED_BYTE)
            {
                // TODO(fjhenigman): Index format translation.
                UNIMPLEMENTED();
                return gl::InternalError() << "Unsigned byte translation is not implemented for "
                                           << "indices in a buffer object";
            }

            BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
            buffer = elementArrayBufferVk->getVkBuffer().getHandle();
            offset = 0;

            if (computeIndexRange)
            {
                ANGLE_TRY(elementArrayBufferVk->getIndexRange(
                    context, type, 0, count, false /*primitiveRestartEnabled*/, &range));
            }
        }
        else
        {
            // Client-memory indices: copy them into the streaming index buffer.
            const GLsizei amount = sizeof(GLushort) * count;
            GLubyte *dst         = nullptr;

            ANGLE_TRY(
                mStreamingIndexData.allocate(contextVk, amount, &dst, &buffer, &offset, nullptr));
            if (type == GL_UNSIGNED_BYTE)
            {
                // Unsigned bytes don't have direct support in Vulkan so we have to expand the
                // memory to a GLushort.
                const GLubyte *in     = static_cast<const GLubyte *>(indices);
                GLushort *expandedDst = reinterpret_cast<GLushort *>(dst);
                for (GLsizei index = 0; index < count; index++)
                {
                    expandedDst[index] = static_cast<GLushort>(in[index]);
                }
            }
            else
            {
                // NOTE(review): for GL_UNSIGNED_INT 'amount' is sized in GLushort units —
                // confirm 4-byte indices cannot reach this client-memory path.
                memcpy(dst, indices, amount);
            }
            ANGLE_TRY(mStreamingIndexData.flush(contextVk));

            if (computeIndexRange)
            {
                range =
                    gl::ComputeIndexRange(type, indices, count, false /*primitiveRestartEnabled*/);
            }
        }

        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, range.start, range.end, nullptr,
                            &commandBuffer));
        commandBuffer->bindIndexBuffer(buffer, offset, GetVkIndexType(type));
        commandBuffer->drawIndexed(count, 1, 0, 0, 0);
    }

    return gl::NoError();
}
387
// Instanced, indexed draws are not implemented yet in the Vulkan backend.
gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
398
gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    // NOTE(review): unlike the other unimplemented draw entry points, this one silently
    // succeeds without recording any draw. Confirm whether it should forward to
    // drawElements() or call UNIMPLEMENTED() and return an error like its siblings.
    return gl::NoError();
}
409
// Convenience accessor for the renderer's VkDevice handle.
VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}
414
// Indirect non-indexed draws are not implemented yet in the Vulkan backend.
gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}
422
// Indirect indexed draws are not implemented yet in the Vulkan backend.
gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}
432
// Device-reset detection is not implemented; always reports no error.
GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}
438
// Vendor string reporting is not implemented yet; returns an empty string.
std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}
444
// Returns the human-readable renderer description provided by RendererVk.
std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}
449
// Debug event markers are not implemented yet in the Vulkan backend.
void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
454
// Debug group markers are not implemented yet in the Vulkan backend.
void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
459
// Debug group markers are not implemented yet in the Vulkan backend.
void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}
464
// Debug groups are not implemented yet in the Vulkan backend.
void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}
469
// Debug groups are not implemented yet in the Vulkan backend.
void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}
474
Jamie Madillfe548342017-06-19 11:13:24 -0400475void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400476{
Jamie Madill72106562017-03-24 14:18:50 -0400477 if (dirtyBits.any())
478 {
479 invalidateCurrentPipeline();
480 }
Jamie Madillebf72992017-10-13 14:09:45 -0400481
482 const auto &glState = context->getGLState();
483
484 // TODO(jmadill): Full dirty bits implementation.
Jamie Madill5547b382017-10-23 18:16:01 -0400485 bool dirtyTextures = false;
Jamie Madillebf72992017-10-13 14:09:45 -0400486
487 for (auto dirtyBit : dirtyBits)
488 {
489 switch (dirtyBit)
490 {
491 case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
Luc Ferron00155d52018-02-06 10:48:47 -0500492 if (glState.isScissorTestEnabled())
493 {
494 mPipelineDesc->updateScissor(glState.getScissor());
495 }
496 else
497 {
498 mPipelineDesc->updateScissor(glState.getViewport());
499 }
Jamie Madillebf72992017-10-13 14:09:45 -0400500 break;
501 case gl::State::DIRTY_BIT_SCISSOR:
Luc Ferron00155d52018-02-06 10:48:47 -0500502 // Only modify the scissor region if the test is enabled, otherwise we want to keep
503 // the viewport size as the scissor region.
504 if (glState.isScissorTestEnabled())
505 {
506 mPipelineDesc->updateScissor(glState.getScissor());
507 }
Jamie Madillebf72992017-10-13 14:09:45 -0400508 break;
509 case gl::State::DIRTY_BIT_VIEWPORT:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500510 mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
511 glState.getFarPlane());
Luc Ferron00155d52018-02-06 10:48:47 -0500512
513 // If the scissor test isn't enabled, we have to also update the scissor to
514 // be equal to the viewport to make sure we keep rendering everything in the
515 // viewport.
516 if (!glState.isScissorTestEnabled())
517 {
518 mPipelineDesc->updateScissor(glState.getViewport());
519 }
Jamie Madillebf72992017-10-13 14:09:45 -0400520 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400521 case gl::State::DIRTY_BIT_DEPTH_RANGE:
522 WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
523 break;
524 case gl::State::DIRTY_BIT_BLEND_ENABLED:
Luc Ferronf8be7562018-02-06 15:59:11 -0500525 mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
Jamie Madillebf72992017-10-13 14:09:45 -0400526 break;
527 case gl::State::DIRTY_BIT_BLEND_COLOR:
Luc Ferronf8be7562018-02-06 15:59:11 -0500528 mPipelineDesc->updateBlendColor(glState.getBlendColor());
Jamie Madillebf72992017-10-13 14:09:45 -0400529 break;
530 case gl::State::DIRTY_BIT_BLEND_FUNCS:
Luc Ferronf8be7562018-02-06 15:59:11 -0500531 mPipelineDesc->updateBlendFuncs(glState.getBlendState());
Jamie Madillebf72992017-10-13 14:09:45 -0400532 break;
533 case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
Luc Ferronf8be7562018-02-06 15:59:11 -0500534 mPipelineDesc->updateBlendEquations(glState.getBlendState());
Jamie Madillebf72992017-10-13 14:09:45 -0400535 break;
536 case gl::State::DIRTY_BIT_COLOR_MASK:
537 WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
538 break;
539 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
540 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
541 break;
542 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
543 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
544 break;
545 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
546 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
547 break;
548 case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
549 WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
550 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400551 case gl::State::DIRTY_BIT_SAMPLE_MASK:
552 WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400553 break;
554 case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
Jamie Madill0cec82a2018-03-14 09:21:07 -0400555 mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState());
Jamie Madillebf72992017-10-13 14:09:45 -0400556 break;
557 case gl::State::DIRTY_BIT_DEPTH_FUNC:
Jamie Madill0cec82a2018-03-14 09:21:07 -0400558 mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
Jamie Madillebf72992017-10-13 14:09:45 -0400559 break;
560 case gl::State::DIRTY_BIT_DEPTH_MASK:
561 WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
562 break;
563 case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
564 WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
565 break;
566 case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
567 WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
568 break;
569 case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
570 WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
571 break;
572 case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
573 WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
574 break;
575 case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
576 WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
577 break;
578 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
579 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
580 break;
581 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
582 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
583 break;
584 case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
585 case gl::State::DIRTY_BIT_CULL_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500586 mPipelineDesc->updateCullMode(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400587 break;
588 case gl::State::DIRTY_BIT_FRONT_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500589 mPipelineDesc->updateFrontFace(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400590 break;
591 case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
592 WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
593 break;
594 case gl::State::DIRTY_BIT_POLYGON_OFFSET:
595 WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
596 break;
597 case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
598 WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
599 break;
600 case gl::State::DIRTY_BIT_LINE_WIDTH:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500601 mPipelineDesc->updateLineWidth(glState.getLineWidth());
Jamie Madillebf72992017-10-13 14:09:45 -0400602 break;
603 case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
604 WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
605 break;
606 case gl::State::DIRTY_BIT_CLEAR_COLOR:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500607 mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
608 mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
609 mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
610 mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
Jamie Madillebf72992017-10-13 14:09:45 -0400611 break;
612 case gl::State::DIRTY_BIT_CLEAR_DEPTH:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500613 mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
Jamie Madillebf72992017-10-13 14:09:45 -0400614 break;
615 case gl::State::DIRTY_BIT_CLEAR_STENCIL:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500616 mClearDepthStencilValue.depthStencil.stencil =
617 static_cast<uint32_t>(glState.getStencilClearValue());
Jamie Madillebf72992017-10-13 14:09:45 -0400618 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400619 case gl::State::DIRTY_BIT_UNPACK_STATE:
620 WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400621 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500622 case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
623 WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
624 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400625 case gl::State::DIRTY_BIT_PACK_STATE:
626 WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400627 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500628 case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
629 WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
630 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400631 case gl::State::DIRTY_BIT_DITHER_ENABLED:
632 WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
633 break;
634 case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
635 WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
636 break;
637 case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
638 WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
639 break;
640 case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
641 WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
642 break;
643 case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
644 WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
645 break;
646 case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
647 WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
648 break;
649 case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
Jamie Madill49ac74b2017-12-21 14:42:33 -0500650 mVertexArrayDirty = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400651 break;
652 case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
653 WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
654 break;
Qin Jiajiaa98a2812017-11-30 18:12:06 +0800655 case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
656 WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
657 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400658 case gl::State::DIRTY_BIT_PROGRAM_BINDING:
659 WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
660 break;
661 case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
662 {
Jamie Madillf2f6d372018-01-10 21:37:23 -0500663 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
664 mPipelineDesc->updateShaders(programVk);
Jamie Madill5547b382017-10-23 18:16:01 -0400665 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400666 break;
667 }
668 case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400669 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400670 break;
671 case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400672 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400673 break;
Geoff Langded79232017-11-28 15:21:11 -0500674 case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
675 WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
676 break;
Xinghua Cao10a4d432017-11-28 14:46:26 +0800677 case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
678 WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
679 break;
Jamie Madillf4141212017-12-12 15:08:07 -0500680 case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
681 WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
682 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400683 case gl::State::DIRTY_BIT_MULTISAMPLING:
684 WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
685 break;
686 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
687 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
688 break;
689 case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
690 WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
691 break;
692 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
693 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
694 break;
695 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
696 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
697 break;
698 case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
699 WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
700 break;
701 case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
702 WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
703 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400704 case gl::State::DIRTY_BIT_CURRENT_VALUES:
705 WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
706 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400707 default:
Jamie Madillc67323a2017-11-02 23:11:41 -0400708 UNREACHABLE();
Jamie Madillebf72992017-10-13 14:09:45 -0400709 break;
710 }
711 }
Jamie Madill5547b382017-10-23 18:16:01 -0400712
713 if (dirtyTextures)
714 {
Jamie Madille1f3ad42017-10-28 23:00:42 -0400715 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
Jamie Madill5547b382017-10-23 18:16:01 -0400716 programVk->invalidateTextures();
Jamie Madill49ac74b2017-12-21 14:42:33 -0500717 mTexturesDirty = true;
Jamie Madill5547b382017-10-23 18:16:01 -0400718 }
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400719}
720
// Reports whether a GPU timer disjoint event occurred (EXT_disjoint_timer_query).
// Not implemented for the Vulkan back-end yet; a value-initialized GLint (0,
// i.e. "not disjoint") is returned.
GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}
726
// Returns the current GPU timestamp (EXT_disjoint_timer_query). Not implemented
// for the Vulkan back-end yet; a value-initialized GLint64 (0) is returned.
GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}
732
// Called when this context becomes current on a thread. The Vulkan back-end
// currently has no per-bind work to do, so the body is intentionally empty and
// the context parameter is unnamed.
void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}
736
737const gl::Caps &ContextVk::getNativeCaps() const
738{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400739 return mRenderer->getNativeCaps();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400740}
741
742const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
743{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400744 return mRenderer->getNativeTextureCaps();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400745}
746
747const gl::Extensions &ContextVk::getNativeExtensions() const
748{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400749 return mRenderer->getNativeExtensions();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400750}
751
752const gl::Limitations &ContextVk::getNativeLimitations() const
753{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400754 return mRenderer->getNativeLimitations();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400755}
756
757CompilerImpl *ContextVk::createCompiler()
758{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400759 return new CompilerVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400760}
761
Jamie Madillacccc6c2016-05-03 17:22:10 -0400762ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400763{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400764 return new ShaderVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400765}
766
Jamie Madillacccc6c2016-05-03 17:22:10 -0400767ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400768{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400769 return new ProgramVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400770}
771
Jamie Madillacccc6c2016-05-03 17:22:10 -0400772FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400773{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500774 return FramebufferVk::CreateUserFBO(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400775}
776
777TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
778{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400779 return new TextureVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400780}
781
Jamie Madille703c602018-02-20 10:21:48 -0500782RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400783{
Jamie Madille703c602018-02-20 10:21:48 -0500784 return new RenderbufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400785}
786
Jamie Madill8f775602016-11-03 16:45:34 -0400787BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400788{
Jamie Madill8f775602016-11-03 16:45:34 -0400789 return new BufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400790}
791
Jamie Madillacccc6c2016-05-03 17:22:10 -0400792VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400793{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400794 return new VertexArrayVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400795}
796
797QueryImpl *ContextVk::createQuery(GLenum type)
798{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400799 return new QueryVk(type);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400800}
801
802FenceNVImpl *ContextVk::createFenceNV()
803{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400804 return new FenceNVVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400805}
806
Jamie Madill70b5bb02017-08-28 13:32:37 -0400807SyncImpl *ContextVk::createSync()
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400808{
Jamie Madill70b5bb02017-08-28 13:32:37 -0400809 return new SyncVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400810}
811
Geoff Lang73bd2182016-07-15 13:01:24 -0400812TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400813{
Geoff Lang73bd2182016-07-15 13:01:24 -0400814 return new TransformFeedbackVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400815}
816
Jamie Madill06ef36b2017-09-09 23:32:46 -0400817SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400818{
Jamie Madill06ef36b2017-09-09 23:32:46 -0400819 return new SamplerVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400820}
821
Yunchao Hea336b902017-08-02 16:05:21 +0800822ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
823{
824 return new ProgramPipelineVk(state);
825}
826
Sami Väisänene45e53b2016-05-25 10:36:04 +0300827std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
828{
829 return std::vector<PathImpl *>();
830}
831
// Drops the cached graphics pipeline pointer so that the next draw call must
// look up (or rebuild) a pipeline from the current pipeline description.
void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}
836
// Notification that the bound vertex array's state changed: marks the vertex
// array dirty and invalidates the cached pipeline so both are refreshed on the
// next draw.
void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}
843
// glDispatchCompute: not implemented in the Vulkan back-end yet; always
// reports an internal error.
gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
852
// glDispatchComputeIndirect: not implemented in the Vulkan back-end yet;
// always reports an internal error.
gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
858
// glMemoryBarrier: not implemented in the Vulkan back-end yet; always reports
// an internal error.
gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
864
// glMemoryBarrierByRegion: not implemented in the Vulkan back-end yet; always
// reports an internal error.
gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
870
// Accessor for the context-owned dynamic descriptor pool. The returned pointer
// is non-owning; the pool's lifetime is tied to this context.
DynamicDescriptorPool *ContextVk::getDynamicDescriptorPool()
{
    return &mDynamicDescriptorPool;
}
875
// Returns the cached VkClearValue holding the current GL clear color, for use
// when recording clear operations.
const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}
880
// Returns the cached VkClearValue holding the current GL depth/stencil clear
// values (the stencil field is updated from the GL state — see the state-sync
// switch above).
const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}
885
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400886} // namespace rx