//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/DynamicDescriptorPool.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{

namespace
{

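// Maps a GL index type onto the matching Vulkan index type. Core Vulkan only exposes 16-bit and
// 32-bit index types, so GL_UNSIGNED_BYTE indices map to VK_INDEX_TYPE_UINT16 and the index data
// itself is widened where it is copied (see the client-memory path in drawElements below).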
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_BYTE:
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

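// Initial sizes, in bytes, of the dynamic buffers used to stream client-side vertex and index
// data into GPU-visible memory for a draw call.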
constexpr size_t kStreamingVertexDataSize = 1024 * 1024;
constexpr size_t kStreamingIndexDataSize  = 1024 * 8;

}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mDynamicDescriptorPool(),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, kStreamingVertexDataSize, 1),
      mStreamingIndexData(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, kStreamingIndexDataSize, 1)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDynamicDescriptorPool.destroy(mRenderer);
    mStreamingVertexData.destroy(device);
    mStreamingIndexData.destroy(device);
    mLineLoopHandler.destroy(device);
}

gl::Error ContextVk::initialize()
{
    ANGLE_TRY(mDynamicDescriptorPool.init(this->getDevice(),
                                          mRenderer->getUniformBufferDescriptorCount(),
                                          mRenderer->getMaxActiveTextures()));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();

    // dEQP tests rely on having no errors generated at the end of a test, and they always call
    // flush at the end of their tests. Returning NoError here until flush is implemented lets us
    // keep working on enabling many tests in the meantime.
    return gl::NoError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state       = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk         = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

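// setupDraw is shared by every draw entry point. It (re)creates the pipeline if the draw mode or
// other invalidating state changed, begins or reuses the render pass command buffer for the draw
// framebuffer, records vertex array and texture read dependencies into the command graph, streams
// any client-side vertex data, and binds vertex buffers plus the program's descriptor sets.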
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               size_t firstVertex,
                               size_t lastVertex,
                               ResourceVk *elementArrayBufferOverride,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *graphNode = nullptr;
    ANGLE_TRY(vkFBO->getCommandGraphNodeForDraw(context, &graphNode));

    if (!graphNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(graphNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = graphNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty || elementArrayBufferOverride != nullptr)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(graphNode, programGL->getActiveAttribLocationsMask(),
                                      elementArrayBufferOverride, queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(graphNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

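// Vulkan has no native line loop primitive, so GL_LINE_LOOP draws are routed through
// mLineLoopHandler, which builds an index buffer describing the closed loop and issues an
// indexed draw in place of the plain vertex draw used for the other modes.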
gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, nullptr,
                        &commandBuffer));

    if (mode == GL_LINE_LOOP)
    {
        ANGLE_TRY(mLineLoopHandler.createIndexBuffer(this, first, count));
        mLineLoopHandler.bindIndexBuffer(VK_INDEX_TYPE_UINT32, &commandBuffer);
        ANGLE_TRY(mLineLoopHandler.draw(count, commandBuffer));
    }
    else
    {
        commandBuffer->draw(count, 1, first, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

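// drawElements has two paths: indices in an element array buffer are bound directly (byte-sized
// indices in a buffer are not translated yet), while client-memory indices are copied into
// mStreamingIndexData, widening GL_UNSIGNED_BYTE data to 16 bits since Vulkan has no 8-bit index
// type. The index range is only computed when some attribute data also has to be streamed.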
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    gl::VertexArray *vao                 = mState.getState().getVertexArray();
    const gl::Buffer *elementArrayBuffer = vao->getElementArrayBuffer().get();
    vk::CommandBuffer *commandBuffer     = nullptr;

    if (mode == GL_LINE_LOOP)
    {
        if (!elementArrayBuffer)
        {
            UNIMPLEMENTED();
            return gl::InternalError() << "Line loop indices in client memory not supported";
        }

        BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

        ANGLE_TRY(mLineLoopHandler.createIndexBufferFromElementArrayBuffer(
            this, elementArrayBufferVk, GetVkIndexType(type), count));

        // TODO(fjhenigman): calculate the index range and pass to setupDraw()
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0,
                            mLineLoopHandler.getLineLoopBufferResource(), &commandBuffer));

        mLineLoopHandler.bindIndexBuffer(GetVkIndexType(type), &commandBuffer);
        commandBuffer->drawIndexed(count + 1, 1, 0, 0, 0);
    }
    else
    {
        ContextVk *contextVk         = vk::GetImpl(context);
        const bool computeIndexRange = vk::GetImpl(vao)->attribsToStream(contextVk).any();
        gl::IndexRange range;
        VkBuffer buffer     = VK_NULL_HANDLE;
        VkDeviceSize offset = 0;

        if (elementArrayBuffer)
        {
            if (type == GL_UNSIGNED_BYTE)
            {
                // TODO(fjhenigman): Index format translation.
                UNIMPLEMENTED();
                return gl::InternalError() << "Unsigned byte translation is not implemented for "
                                           << "indices in a buffer object";
            }

            BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
            buffer                         = elementArrayBufferVk->getVkBuffer().getHandle();
            offset                         = 0;

            if (computeIndexRange)
            {
                ANGLE_TRY(elementArrayBufferVk->getIndexRange(
                    context, type, 0, count, false /*primitiveRestartEnabled*/, &range));
            }
        }
        else
        {
            const GLsizei amount = sizeof(GLushort) * count;
            GLubyte *dst         = nullptr;

            ANGLE_TRY(
                mStreamingIndexData.allocate(contextVk, amount, &dst, &buffer, &offset, nullptr));
            if (type == GL_UNSIGNED_BYTE)
            {
                // Unsigned bytes don't have direct support in Vulkan so we have to expand the
                // memory to a GLushort.
                const GLubyte *in     = static_cast<const GLubyte *>(indices);
                GLushort *expandedDst = reinterpret_cast<GLushort *>(dst);
                for (GLsizei index = 0; index < count; index++)
                {
                    expandedDst[index] = static_cast<GLushort>(in[index]);
                }
            }
            else
            {
                memcpy(dst, indices, amount);
            }
            ANGLE_TRY(mStreamingIndexData.flush(contextVk));

            if (computeIndexRange)
            {
                range =
                    gl::ComputeIndexRange(type, indices, count, false /*primitiveRestartEnabled*/);
            }
        }

        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, range.start, range.end, nullptr,
                            &commandBuffer));
        commandBuffer->bindIndexBuffer(buffer, offset, GetVkIndexType(type));
        commandBuffer->drawIndexed(count, 1, 0, 0, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

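// syncState translates front-end dirty bits into updates of the pipeline description and the
// cached clear values. Any dirty bit also invalidates the current pipeline handle so the next
// draw re-fetches it through initPipeline(); bits that are not handled yet only log a warning.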
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

DynamicDescriptorPool *ContextVk::getDynamicDescriptorPool()
{
    return &mDynamicDescriptorPool;
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

}  // namespace rx