blob: 17c0bec8eb8f4226a9614ba1930fe9f53fdf5bca [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ContextVk.cpp:
7// Implements the class methods for ContextVk.
8//
9
10#include "libANGLE/renderer/vulkan/ContextVk.h"
11
Jamie Madill20e005b2017-04-07 14:19:22 -040012#include "common/bitset_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040013#include "common/debug.h"
Frank Henigmana53d0e12018-02-13 00:06:06 -050014#include "common/utilities.h"
Jamie Madillbd159f02017-10-09 19:39:06 -040015#include "libANGLE/Context.h"
Jamie Madilldf68a6f2017-01-13 17:29:53 -050016#include "libANGLE/Program.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040017#include "libANGLE/renderer/vulkan/BufferVk.h"
Jamie Madill1f46bc12018-02-20 16:09:43 -050018#include "libANGLE/renderer/vulkan/CommandGraph.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040019#include "libANGLE/renderer/vulkan/CompilerVk.h"
20#include "libANGLE/renderer/vulkan/ContextVk.h"
21#include "libANGLE/renderer/vulkan/DeviceVk.h"
22#include "libANGLE/renderer/vulkan/FenceNVVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040023#include "libANGLE/renderer/vulkan/FramebufferVk.h"
24#include "libANGLE/renderer/vulkan/ImageVk.h"
Yunchao Hea336b902017-08-02 16:05:21 +080025#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040026#include "libANGLE/renderer/vulkan/ProgramVk.h"
27#include "libANGLE/renderer/vulkan/QueryVk.h"
28#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
29#include "libANGLE/renderer/vulkan/RendererVk.h"
30#include "libANGLE/renderer/vulkan/SamplerVk.h"
31#include "libANGLE/renderer/vulkan/ShaderVk.h"
Jamie Madill70b5bb02017-08-28 13:32:37 -040032#include "libANGLE/renderer/vulkan/SyncVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040033#include "libANGLE/renderer/vulkan/TextureVk.h"
34#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
35#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050036#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040037
38namespace rx
39{
40
namespace
{

// Maps a GL index type enum to the Vulkan equivalent.
// GL_UNSIGNED_BYTE intentionally maps to VK_INDEX_TYPE_UINT16: Vulkan has no
// 8-bit index type, so byte indices are expanded to GLushort before drawing
// (see the expansion loop in ContextVk::drawElements).
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_BYTE:
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

// Indices into the VkDescriptorPoolSize array built in ContextVk::initialize().
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool = 1,
};

// Byte sizes of the buffers used to stream client-side vertex and index data.
constexpr size_t kStreamingVertexDataSize = 1024 * 1024;
constexpr size_t kStreamingIndexDataSize = 1024 * 8;

}  // anonymous namespace
69
// Constructor: wires the context to its renderer and sizes the streaming
// buffers for client-side vertex/index data. Draw mode starts as GL_NONE so
// the first draw always triggers pipeline initialization.
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, kStreamingVertexDataSize),
      mStreamingIndexData(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, kStreamingIndexDataSize)
{
    // Zero-fill the clear values; both are plain C unions from the Vulkan API.
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}
82
83ContextVk::~ContextVk()
84{
85}
86
Jamie Madill76e471e2017-10-21 09:56:01 -040087void ContextVk::onDestroy(const gl::Context *context)
88{
89 VkDevice device = mRenderer->getDevice();
90
91 mDescriptorPool.destroy(device);
Frank Henigman17448952017-01-05 15:48:26 -050092 mStreamingVertexData.destroy(device);
Frank Henigmana53d0e12018-02-13 00:06:06 -050093 mStreamingIndexData.destroy(device);
Luc Ferron360098d2018-02-21 07:33:50 -050094 mLineLoopHandler.destroy(device);
Jamie Madill76e471e2017-10-21 09:56:01 -040095}
96
Jamie Madill9e54b5a2016-05-25 12:57:39 -040097gl::Error ContextVk::initialize()
98{
Jamie Madill76e471e2017-10-21 09:56:01 -040099 VkDevice device = mRenderer->getDevice();
100
101 VkDescriptorPoolSize poolSizes[2];
102 poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
103 poolSizes[UniformBufferPool].descriptorCount = 1024;
104 poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
105 poolSizes[TexturePool].descriptorCount = 1024;
106
107 VkDescriptorPoolCreateInfo descriptorPoolInfo;
108 descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
109 descriptorPoolInfo.pNext = nullptr;
Jamie Madill67ae6c52018-03-09 11:49:01 -0500110 descriptorPoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
Jamie Madill76e471e2017-10-21 09:56:01 -0400111
112 // TODO(jmadill): Pick non-arbitrary max.
113 descriptorPoolInfo.maxSets = 2048;
114
115 // Reserve pools for uniform blocks and textures.
116 descriptorPoolInfo.poolSizeCount = 2;
117 descriptorPoolInfo.pPoolSizes = poolSizes;
118
119 ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));
120
Jamie Madillf2f6d372018-01-10 21:37:23 -0500121 mPipelineDesc.reset(new vk::PipelineDesc());
122 mPipelineDesc->initDefaults();
123
Jamie Madille09bd5d2016-11-29 16:20:35 -0500124 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400125}
126
gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();

    // dEQP tests rely on having no errors thrown at the end of the test and they always call
    // flush at the end of their tests. Returning NoError until flush is implemented allows
    // us to work on enabling many tests in the meantime.
    return gl::NoError();
}
137
gl::Error ContextVk::finish(const gl::Context *context)
{
    // Delegate entirely to the renderer's finish implementation.
    return mRenderer->finish(context);
}
142
Jamie Madill4928b7c2017-06-20 12:57:39 -0400143gl::Error ContextVk::initPipeline(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400144{
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500145 ASSERT(!mCurrentPipeline);
Jamie Madill72106562017-03-24 14:18:50 -0400146
Jamie Madillf2f6d372018-01-10 21:37:23 -0500147 const gl::State &state = mState.getState();
148 VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
149 FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
150 ProgramVk *programVk = vk::GetImpl(state.getProgram());
Luc Ferronceb71902018-02-05 15:18:47 -0500151 const gl::AttributesMask activeAttribLocationsMask =
152 state.getProgram()->getActiveAttribLocationsMask();
Jamie Madillf2f6d372018-01-10 21:37:23 -0500153
154 // Ensure the topology of the pipeline description is updated.
155 mPipelineDesc->updateTopology(mCurrentDrawMode);
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500156
Jamie Madill112a3a82018-01-23 13:04:06 -0500157 // Copy over the latest attrib and binding descriptions.
158 vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500159
Jamie Madillf2f6d372018-01-10 21:37:23 -0500160 // Ensure that the RenderPass description is updated.
161 mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500162
163 // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
Luc Ferronceb71902018-02-05 15:18:47 -0500164 ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
165 &mCurrentPipeline));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500166
Jamie Madill72106562017-03-24 14:18:50 -0400167 return gl::NoError();
168}
169
// Shared preamble for all draw calls: ensures a valid pipeline exists for the
// draw mode, opens (or reuses) the render-pass command buffer, flushes VAO and
// texture dependencies into the command graph, streams client-side vertex data,
// and binds the pipeline, vertex buffers, and descriptor sets.
//
// |elementArrayBufferOverride| may substitute for the VAO's element array
// buffer (used by the line-loop emulation path); passing a non-null override
// forces the vertex-array dependency update. On success, *commandBuffer points
// at the command buffer the caller should record its draw into.
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               size_t firstVertex,
                               size_t lastVertex,
                               ResourceVk *elementArrayBufferOverride,
                               vk::CommandBuffer **commandBuffer)
{
    // A mode change invalidates the pipeline because topology is baked into
    // the pipeline description (see initPipeline).
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *graphNode = nullptr;
    ANGLE_TRY(vkFBO->getCommandGraphNodeForDraw(context, &graphNode));

    // Starting a fresh render pass invalidates previously recorded bindings,
    // so mark everything dirty; otherwise reuse the in-progress command buffer.
    if (!graphNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(graphNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = graphNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty || elementArrayBufferOverride != nullptr)
    {

        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(graphNode, programGL->getActiveAttribLocationsMask(),
                                      elementArrayBufferOverride, queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(graphNode, mRenderer->getCurrentQueueSerial());
        }
    }

    // Bind pipeline, stream any client-side vertex data, then bind the
    // resulting vertex buffers.
    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object so it is not destroyed
    // while this queue submission may still reference it.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}
276
Jamie Madilld03a8492017-10-03 15:46:06 -0400277gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
278{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500279 vk::CommandBuffer *commandBuffer = nullptr;
Luc Ferron78e39b32018-02-26 07:42:44 -0500280 ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, nullptr,
281 &commandBuffer));
Luc Ferron360098d2018-02-21 07:33:50 -0500282
283 if (mode == GL_LINE_LOOP)
284 {
Luc Ferron78e39b32018-02-26 07:42:44 -0500285 ANGLE_TRY(mLineLoopHandler.createIndexBuffer(this, first, count));
286 mLineLoopHandler.bindIndexBuffer(VK_INDEX_TYPE_UINT32, &commandBuffer);
287 ANGLE_TRY(mLineLoopHandler.draw(count, commandBuffer));
Luc Ferron360098d2018-02-21 07:33:50 -0500288 }
289 else
290 {
291 commandBuffer->draw(count, 1, first, 0);
292 }
293
Jamie Madilld03a8492017-10-03 15:46:06 -0400294 return gl::NoError();
295}
296
// Instanced non-indexed draw: not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
306
// Records an indexed draw. Three special cases are handled here:
//  - GL_LINE_LOOP: emulated via an index buffer derived from the element
//    array buffer (client-memory indices are not yet supported).
//  - GL_UNSIGNED_BYTE indices: Vulkan has no 8-bit index type, so client
//    indices are expanded to GLushort; byte indices in a buffer object are
//    not yet translated.
//  - Client-memory indices: streamed into mStreamingIndexData.
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    gl::VertexArray *vao                 = mState.getState().getVertexArray();
    const gl::Buffer *elementArrayBuffer = vao->getElementArrayBuffer().get();
    vk::CommandBuffer *commandBuffer     = nullptr;

    if (mode == GL_LINE_LOOP)
    {
        if (!elementArrayBuffer)
        {
            UNIMPLEMENTED();
            return gl::InternalError() << "Line loop indices in client memory not supported";
        }

        BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

        ANGLE_TRY(mLineLoopHandler.createIndexBufferFromElementArrayBuffer(
            this, elementArrayBufferVk, GetVkIndexType(type), count));

        // TODO(fjhenigman): calculate the index range and pass to setupDraw()
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0,
                            mLineLoopHandler.getLineLoopBufferResource(), &commandBuffer));

        mLineLoopHandler.bindIndexBuffer(GetVkIndexType(type), &commandBuffer);
        // count + 1 accounts for the extra index that closes the loop.
        commandBuffer->drawIndexed(count + 1, 1, 0, 0, 0);
    }
    else
    {
        ContextVk *contextVk = vk::GetImpl(context);
        // The index range is only needed when attributes must be streamed.
        const bool computeIndexRange = vk::GetImpl(vao)->attribsToStream(contextVk).any();
        gl::IndexRange range;
        VkBuffer buffer     = VK_NULL_HANDLE;
        VkDeviceSize offset = 0;

        if (elementArrayBuffer)
        {
            if (type == GL_UNSIGNED_BYTE)
            {
                // TODO(fjhenigman): Index format translation.
                UNIMPLEMENTED();
                return gl::InternalError() << "Unsigned byte translation is not implemented for "
                                           << "indices in a buffer object";
            }

            // Draw directly from the bound element array buffer.
            BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
            buffer = elementArrayBufferVk->getVkBuffer().getHandle();
            offset = 0;

            if (computeIndexRange)
            {
                ANGLE_TRY(elementArrayBufferVk->getIndexRange(
                    context, type, 0, count, false /*primitiveRestartEnabled*/, &range));
            }
        }
        else
        {
            // Client-memory indices: copy (or expand) them into the streaming
            // index buffer. |amount| is sized for GLushort since byte indices
            // are widened below.
            const GLsizei amount = sizeof(GLushort) * count;
            GLubyte *dst         = nullptr;

            ANGLE_TRY(mStreamingIndexData.allocate(contextVk, amount, &dst, &buffer, &offset));
            if (type == GL_UNSIGNED_BYTE)
            {
                // Unsigned bytes don't have direct support in Vulkan so we have to expand the
                // memory to a GLushort.
                const GLubyte *in     = static_cast<const GLubyte *>(indices);
                GLushort *expandedDst = reinterpret_cast<GLushort *>(dst);
                for (GLsizei index = 0; index < count; index++)
                {
                    expandedDst[index] = static_cast<GLushort>(in[index]);
                }
            }
            else
            {
                memcpy(dst, indices, amount);
            }
            ANGLE_TRY(mStreamingIndexData.flush(contextVk));

            if (computeIndexRange)
            {
                range =
                    gl::ComputeIndexRange(type, indices, count, false /*primitiveRestartEnabled*/);
            }
        }

        // NOTE(review): when computeIndexRange is false, |range| is passed
        // default-constructed — presumably unused by setupDraw in that case;
        // confirm against streamVertexData.
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, range.start, range.end, nullptr,
                            &commandBuffer));
        commandBuffer->bindIndexBuffer(buffer, offset, GetVkIndexType(type));
        commandBuffer->drawIndexed(count, 1, 0, 0, 0);
    }

    return gl::NoError();
}
403
// Instanced indexed draw: not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
414
gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    // NOTE(review): this is currently a silent no-op that reports success —
    // unlike the other unimplemented draw entry points it never calls
    // UNIMPLEMENTED(). Confirm whether it should forward to drawElements()
    // or be flagged unimplemented like its siblings.
    return gl::NoError();
}
425
// Convenience accessor for the renderer's VkDevice handle.
VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}
430
// Indirect non-indexed draw: not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}
438
// Indirect indexed draw: not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}
448
// Device-reset detection is not implemented; always reports no error.
GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}
454
// Vendor string query: not implemented; returns an empty string.
std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}
460
// Delegates the GL_RENDERER description to the renderer.
std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}
465
// Debug-marker insertion: not yet implemented for the Vulkan backend.
void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
470
// Debug-group push (EXT_debug_marker): not yet implemented.
void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
475
// Debug-group pop (EXT_debug_marker): not yet implemented.
void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}
480
// Debug-group push (KHR_debug): not yet implemented.
void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}
485
// Debug-group pop (KHR_debug): not yet implemented.
void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}
490
Jamie Madillfe548342017-06-19 11:13:24 -0400491void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400492{
Jamie Madill72106562017-03-24 14:18:50 -0400493 if (dirtyBits.any())
494 {
495 invalidateCurrentPipeline();
496 }
Jamie Madillebf72992017-10-13 14:09:45 -0400497
498 const auto &glState = context->getGLState();
499
500 // TODO(jmadill): Full dirty bits implementation.
Jamie Madill5547b382017-10-23 18:16:01 -0400501 bool dirtyTextures = false;
Jamie Madillebf72992017-10-13 14:09:45 -0400502
503 for (auto dirtyBit : dirtyBits)
504 {
505 switch (dirtyBit)
506 {
507 case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
Luc Ferron00155d52018-02-06 10:48:47 -0500508 if (glState.isScissorTestEnabled())
509 {
510 mPipelineDesc->updateScissor(glState.getScissor());
511 }
512 else
513 {
514 mPipelineDesc->updateScissor(glState.getViewport());
515 }
Jamie Madillebf72992017-10-13 14:09:45 -0400516 break;
517 case gl::State::DIRTY_BIT_SCISSOR:
Luc Ferron00155d52018-02-06 10:48:47 -0500518 // Only modify the scissor region if the test is enabled, otherwise we want to keep
519 // the viewport size as the scissor region.
520 if (glState.isScissorTestEnabled())
521 {
522 mPipelineDesc->updateScissor(glState.getScissor());
523 }
Jamie Madillebf72992017-10-13 14:09:45 -0400524 break;
525 case gl::State::DIRTY_BIT_VIEWPORT:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500526 mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
527 glState.getFarPlane());
Luc Ferron00155d52018-02-06 10:48:47 -0500528
529 // If the scissor test isn't enabled, we have to also update the scissor to
530 // be equal to the viewport to make sure we keep rendering everything in the
531 // viewport.
532 if (!glState.isScissorTestEnabled())
533 {
534 mPipelineDesc->updateScissor(glState.getViewport());
535 }
Jamie Madillebf72992017-10-13 14:09:45 -0400536 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400537 case gl::State::DIRTY_BIT_DEPTH_RANGE:
538 WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
539 break;
540 case gl::State::DIRTY_BIT_BLEND_ENABLED:
Luc Ferronf8be7562018-02-06 15:59:11 -0500541 mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
Jamie Madillebf72992017-10-13 14:09:45 -0400542 break;
543 case gl::State::DIRTY_BIT_BLEND_COLOR:
Luc Ferronf8be7562018-02-06 15:59:11 -0500544 mPipelineDesc->updateBlendColor(glState.getBlendColor());
Jamie Madillebf72992017-10-13 14:09:45 -0400545 break;
546 case gl::State::DIRTY_BIT_BLEND_FUNCS:
Luc Ferronf8be7562018-02-06 15:59:11 -0500547 mPipelineDesc->updateBlendFuncs(glState.getBlendState());
Jamie Madillebf72992017-10-13 14:09:45 -0400548 break;
549 case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
Luc Ferronf8be7562018-02-06 15:59:11 -0500550 mPipelineDesc->updateBlendEquations(glState.getBlendState());
Jamie Madillebf72992017-10-13 14:09:45 -0400551 break;
552 case gl::State::DIRTY_BIT_COLOR_MASK:
553 WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
554 break;
555 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
556 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
557 break;
558 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
559 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
560 break;
561 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
562 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
563 break;
564 case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
565 WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
566 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400567 case gl::State::DIRTY_BIT_SAMPLE_MASK:
568 WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400569 break;
570 case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
Jamie Madill0cec82a2018-03-14 09:21:07 -0400571 mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState());
Jamie Madillebf72992017-10-13 14:09:45 -0400572 break;
573 case gl::State::DIRTY_BIT_DEPTH_FUNC:
Jamie Madill0cec82a2018-03-14 09:21:07 -0400574 mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
Jamie Madillebf72992017-10-13 14:09:45 -0400575 break;
576 case gl::State::DIRTY_BIT_DEPTH_MASK:
577 WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
578 break;
579 case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
580 WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
581 break;
582 case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
583 WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
584 break;
585 case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
586 WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
587 break;
588 case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
589 WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
590 break;
591 case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
592 WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
593 break;
594 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
595 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
596 break;
597 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
598 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
599 break;
600 case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
601 case gl::State::DIRTY_BIT_CULL_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500602 mPipelineDesc->updateCullMode(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400603 break;
604 case gl::State::DIRTY_BIT_FRONT_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500605 mPipelineDesc->updateFrontFace(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400606 break;
607 case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
608 WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
609 break;
610 case gl::State::DIRTY_BIT_POLYGON_OFFSET:
611 WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
612 break;
613 case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
614 WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
615 break;
616 case gl::State::DIRTY_BIT_LINE_WIDTH:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500617 mPipelineDesc->updateLineWidth(glState.getLineWidth());
Jamie Madillebf72992017-10-13 14:09:45 -0400618 break;
619 case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
620 WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
621 break;
622 case gl::State::DIRTY_BIT_CLEAR_COLOR:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500623 mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
624 mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
625 mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
626 mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
Jamie Madillebf72992017-10-13 14:09:45 -0400627 break;
628 case gl::State::DIRTY_BIT_CLEAR_DEPTH:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500629 mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
Jamie Madillebf72992017-10-13 14:09:45 -0400630 break;
631 case gl::State::DIRTY_BIT_CLEAR_STENCIL:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500632 mClearDepthStencilValue.depthStencil.stencil =
633 static_cast<uint32_t>(glState.getStencilClearValue());
Jamie Madillebf72992017-10-13 14:09:45 -0400634 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400635 case gl::State::DIRTY_BIT_UNPACK_STATE:
636 WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400637 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500638 case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
639 WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
640 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400641 case gl::State::DIRTY_BIT_PACK_STATE:
642 WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400643 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500644 case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
645 WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
646 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400647 case gl::State::DIRTY_BIT_DITHER_ENABLED:
648 WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
649 break;
650 case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
651 WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
652 break;
653 case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
654 WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
655 break;
656 case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
657 WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
658 break;
659 case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
660 WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
661 break;
662 case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
663 WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
664 break;
665 case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
Jamie Madill49ac74b2017-12-21 14:42:33 -0500666 mVertexArrayDirty = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400667 break;
668 case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
669 WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
670 break;
Qin Jiajiaa98a2812017-11-30 18:12:06 +0800671 case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
672 WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
673 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400674 case gl::State::DIRTY_BIT_PROGRAM_BINDING:
675 WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
676 break;
677 case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
678 {
Jamie Madillf2f6d372018-01-10 21:37:23 -0500679 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
680 mPipelineDesc->updateShaders(programVk);
Jamie Madill5547b382017-10-23 18:16:01 -0400681 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400682 break;
683 }
684 case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400685 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400686 break;
687 case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400688 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400689 break;
Geoff Langded79232017-11-28 15:21:11 -0500690 case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
691 WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
692 break;
Xinghua Cao10a4d432017-11-28 14:46:26 +0800693 case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
694 WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
695 break;
Jamie Madillf4141212017-12-12 15:08:07 -0500696 case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
697 WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
698 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400699 case gl::State::DIRTY_BIT_MULTISAMPLING:
700 WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
701 break;
702 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
703 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
704 break;
705 case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
706 WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
707 break;
708 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
709 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
710 break;
711 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
712 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
713 break;
714 case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
715 WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
716 break;
717 case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
718 WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
719 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400720 case gl::State::DIRTY_BIT_CURRENT_VALUES:
721 WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
722 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400723 default:
Jamie Madillc67323a2017-11-02 23:11:41 -0400724 UNREACHABLE();
Jamie Madillebf72992017-10-13 14:09:45 -0400725 break;
726 }
727 }
Jamie Madill5547b382017-10-23 18:16:01 -0400728
729 if (dirtyTextures)
730 {
Jamie Madille1f3ad42017-10-28 23:00:42 -0400731 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
Jamie Madill5547b382017-10-23 18:16:01 -0400732 programVk->invalidateTextures();
Jamie Madill49ac74b2017-12-21 14:42:33 -0500733 mTexturesDirty = true;
Jamie Madill5547b382017-10-23 18:16:01 -0400734 }
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400735}
736
737GLint ContextVk::getGPUDisjoint()
738{
739 UNIMPLEMENTED();
740 return GLint();
741}
742
743GLint64 ContextVk::getTimestamp()
744{
745 UNIMPLEMENTED();
746 return GLint64();
747}
748
void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
    // No per-make-current work is performed by the Vulkan backend yet.
}
752
753const gl::Caps &ContextVk::getNativeCaps() const
754{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400755 return mRenderer->getNativeCaps();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400756}
757
758const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
759{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400760 return mRenderer->getNativeTextureCaps();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400761}
762
763const gl::Extensions &ContextVk::getNativeExtensions() const
764{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400765 return mRenderer->getNativeExtensions();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400766}
767
768const gl::Limitations &ContextVk::getNativeLimitations() const
769{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400770 return mRenderer->getNativeLimitations();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400771}
772
773CompilerImpl *ContextVk::createCompiler()
774{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400775 return new CompilerVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400776}
777
Jamie Madillacccc6c2016-05-03 17:22:10 -0400778ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400779{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400780 return new ShaderVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400781}
782
Jamie Madillacccc6c2016-05-03 17:22:10 -0400783ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400784{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400785 return new ProgramVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400786}
787
Jamie Madillacccc6c2016-05-03 17:22:10 -0400788FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400789{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500790 return FramebufferVk::CreateUserFBO(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400791}
792
793TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
794{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400795 return new TextureVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400796}
797
Jamie Madille703c602018-02-20 10:21:48 -0500798RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400799{
Jamie Madille703c602018-02-20 10:21:48 -0500800 return new RenderbufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400801}
802
Jamie Madill8f775602016-11-03 16:45:34 -0400803BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400804{
Jamie Madill8f775602016-11-03 16:45:34 -0400805 return new BufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400806}
807
Jamie Madillacccc6c2016-05-03 17:22:10 -0400808VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400809{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400810 return new VertexArrayVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400811}
812
813QueryImpl *ContextVk::createQuery(GLenum type)
814{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400815 return new QueryVk(type);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400816}
817
818FenceNVImpl *ContextVk::createFenceNV()
819{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400820 return new FenceNVVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400821}
822
Jamie Madill70b5bb02017-08-28 13:32:37 -0400823SyncImpl *ContextVk::createSync()
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400824{
Jamie Madill70b5bb02017-08-28 13:32:37 -0400825 return new SyncVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400826}
827
Geoff Lang73bd2182016-07-15 13:01:24 -0400828TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400829{
Geoff Lang73bd2182016-07-15 13:01:24 -0400830 return new TransformFeedbackVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400831}
832
Jamie Madill06ef36b2017-09-09 23:32:46 -0400833SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400834{
Jamie Madill06ef36b2017-09-09 23:32:46 -0400835 return new SamplerVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400836}
837
Yunchao Hea336b902017-08-02 16:05:21 +0800838ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
839{
840 return new ProgramPipelineVk(state);
841}
842
Sami Väisänene45e53b2016-05-25 10:36:04 +0300843std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
844{
845 return std::vector<PathImpl *>();
846}
847
void ContextVk::invalidateCurrentPipeline()
{
    // Drop the cached pipeline pointer so it must be re-established before the
    // next use.
    mCurrentPipeline = nullptr;
}
852
Jamie Madill49ac74b2017-12-21 14:42:33 -0500853void ContextVk::onVertexArrayChange()
854{
855 // TODO(jmadill): Does not handle dependent state changes.
856 mVertexArrayDirty = true;
857 invalidateCurrentPipeline();
858}
859
// Compute shader dispatch is not implemented in the Vulkan backend yet; this
// stub always reports an internal error.
gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
868
// Indirect compute dispatch is not implemented in the Vulkan backend yet; this
// stub always reports an internal error.
gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
874
// glMemoryBarrier is not implemented in the Vulkan backend yet; this stub
// always reports an internal error.
gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
880
// glMemoryBarrierByRegion is not implemented in the Vulkan backend yet; this
// stub always reports an internal error.
gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
886
// Exposes the descriptor pool owned by this context. The returned pointer
// remains owned by ContextVk.
vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}
891
// Returns the cached Vulkan clear value for color clears. Presumably kept in
// sync with GL clear-color state in syncState (as the depth/stencil value is)
// — confirm against the full syncState switch.
const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}
896
// Returns the cached Vulkan clear value for depth/stencil clears, updated from
// the GL depth and stencil clear values in syncState.
const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}
901
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400902} // namespace rx