blob: 6a03392136580b6ce0dcdc2b811182068ed219b7 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// ContextVk.cpp:
7// Implements the class methods for ContextVk.
8//
9
10#include "libANGLE/renderer/vulkan/ContextVk.h"
11
Jamie Madill20e005b2017-04-07 14:19:22 -040012#include "common/bitset_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040013#include "common/debug.h"
Frank Henigmana53d0e12018-02-13 00:06:06 -050014#include "common/utilities.h"
Jamie Madillbd159f02017-10-09 19:39:06 -040015#include "libANGLE/Context.h"
Jamie Madilldf68a6f2017-01-13 17:29:53 -050016#include "libANGLE/Program.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040017#include "libANGLE/renderer/vulkan/BufferVk.h"
Jamie Madill1f46bc12018-02-20 16:09:43 -050018#include "libANGLE/renderer/vulkan/CommandGraph.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040019#include "libANGLE/renderer/vulkan/CompilerVk.h"
20#include "libANGLE/renderer/vulkan/ContextVk.h"
21#include "libANGLE/renderer/vulkan/DeviceVk.h"
22#include "libANGLE/renderer/vulkan/FenceNVVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040023#include "libANGLE/renderer/vulkan/FramebufferVk.h"
24#include "libANGLE/renderer/vulkan/ImageVk.h"
Yunchao Hea336b902017-08-02 16:05:21 +080025#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040026#include "libANGLE/renderer/vulkan/ProgramVk.h"
27#include "libANGLE/renderer/vulkan/QueryVk.h"
28#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
29#include "libANGLE/renderer/vulkan/RendererVk.h"
30#include "libANGLE/renderer/vulkan/SamplerVk.h"
31#include "libANGLE/renderer/vulkan/ShaderVk.h"
Jamie Madill70b5bb02017-08-28 13:32:37 -040032#include "libANGLE/renderer/vulkan/SyncVk.h"
Jamie Madillacccc6c2016-05-03 17:22:10 -040033#include "libANGLE/renderer/vulkan/TextureVk.h"
34#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
35#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050036#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040037
38namespace rx
39{
40
Jamie Madilld03a8492017-10-03 15:46:06 -040041namespace
42{
43
44VkIndexType GetVkIndexType(GLenum glIndexType)
45{
46 switch (glIndexType)
47 {
Luc Ferron80964f92018-03-08 10:31:24 -050048 case GL_UNSIGNED_BYTE:
Jamie Madilld03a8492017-10-03 15:46:06 -040049 case GL_UNSIGNED_SHORT:
50 return VK_INDEX_TYPE_UINT16;
51 case GL_UNSIGNED_INT:
52 return VK_INDEX_TYPE_UINT32;
53 default:
54 UNREACHABLE();
55 return VK_INDEX_TYPE_MAX_ENUM;
56 }
57}
58
// Indices into the VkDescriptorPoolSize array built in ContextVk::initialize();
// one slot for uniform buffers, one for combined image samplers.
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool = 1,
};

// Initial capacities (in bytes) of the buffers used to stream client-side
// vertex and index data to the GPU.
constexpr size_t kStreamingVertexDataSize = 1024 * 1024;
constexpr size_t kStreamingIndexDataSize = 1024 * 8;
67
Jamie Madilld03a8492017-10-03 15:46:06 -040068} // anonymous namespace
69
// Constructs the Vulkan context implementation. The streaming buffers are
// created with usage flags matching their purpose (vertex vs. index data) and
// the initial sizes declared above.
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, kStreamingVertexDataSize),
      mStreamingIndexData(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, kStreamingIndexDataSize)
{
    // VkClearValue is a union; zero the whole object so every member is defined.
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}
82
83ContextVk::~ContextVk()
84{
85}
86
// Releases all Vulkan objects owned by this context. Invoked when the GL
// context is torn down; the VkDevice must still be valid at this point.
void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
    mStreamingVertexData.destroy(device);
    mStreamingIndexData.destroy(device);
    mLineLoopHandler.destroy(device);
}
96
Jamie Madill9e54b5a2016-05-25 12:57:39 -040097gl::Error ContextVk::initialize()
98{
Jamie Madill76e471e2017-10-21 09:56:01 -040099 VkDevice device = mRenderer->getDevice();
100
101 VkDescriptorPoolSize poolSizes[2];
102 poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
103 poolSizes[UniformBufferPool].descriptorCount = 1024;
104 poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
105 poolSizes[TexturePool].descriptorCount = 1024;
106
107 VkDescriptorPoolCreateInfo descriptorPoolInfo;
108 descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
109 descriptorPoolInfo.pNext = nullptr;
110 descriptorPoolInfo.flags = 0;
111
112 // TODO(jmadill): Pick non-arbitrary max.
113 descriptorPoolInfo.maxSets = 2048;
114
115 // Reserve pools for uniform blocks and textures.
116 descriptorPoolInfo.poolSizeCount = 2;
117 descriptorPoolInfo.pPoolSizes = poolSizes;
118
119 ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));
120
Jamie Madillf2f6d372018-01-10 21:37:23 -0500121 mPipelineDesc.reset(new vk::PipelineDesc());
122 mPipelineDesc->initDefaults();
123
Jamie Madille09bd5d2016-11-29 16:20:35 -0500124 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400125}
126
// glFlush entry point — not implemented yet for the Vulkan backend.
gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}
133
// glFinish entry point: defers to the renderer, which waits for submitted
// work to complete.
gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}
138
Jamie Madill4928b7c2017-06-20 12:57:39 -0400139gl::Error ContextVk::initPipeline(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400140{
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500141 ASSERT(!mCurrentPipeline);
Jamie Madill72106562017-03-24 14:18:50 -0400142
Jamie Madillf2f6d372018-01-10 21:37:23 -0500143 const gl::State &state = mState.getState();
144 VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
145 FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
146 ProgramVk *programVk = vk::GetImpl(state.getProgram());
Luc Ferronceb71902018-02-05 15:18:47 -0500147 const gl::AttributesMask activeAttribLocationsMask =
148 state.getProgram()->getActiveAttribLocationsMask();
Jamie Madillf2f6d372018-01-10 21:37:23 -0500149
150 // Ensure the topology of the pipeline description is updated.
151 mPipelineDesc->updateTopology(mCurrentDrawMode);
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500152
Jamie Madill112a3a82018-01-23 13:04:06 -0500153 // Copy over the latest attrib and binding descriptions.
154 vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500155
Jamie Madillf2f6d372018-01-10 21:37:23 -0500156 // Ensure that the RenderPass description is updated.
157 mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500158
159 // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
Luc Ferronceb71902018-02-05 15:18:47 -0500160 ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
161 &mCurrentPipeline));
Jamie Madilldf68a6f2017-01-13 17:29:53 -0500162
Jamie Madill72106562017-03-24 14:18:50 -0400163 return gl::NoError();
164}
165
// Common preamble for all draw calls: ensures a valid pipeline exists for the
// current draw mode, opens (or reuses) the render-pass command buffer for the
// draw framebuffer, flushes VAO/texture dependencies into the command graph,
// streams client-side vertex data, binds vertex buffers, and binds the
// program's descriptor sets. On success *commandBuffer is ready for the
// actual draw command.
//
// firstVertex/lastVertex bound the vertex range read by this draw (used for
// streaming client attribute data). elementArrayBufferOverride, when
// non-null, substitutes for the VAO's element array buffer in the dependency
// update (used by the line-loop emulation path).
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               size_t firstVertex,
                               size_t lastVertex,
                               ResourceVk *elementArrayBufferOverride,
                               vk::CommandBuffer **commandBuffer)
{
    // A mode change invalidates the pipeline, since topology is baked into it.
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *graphNode = nullptr;
    ANGLE_TRY(vkFBO->getCommandGraphNodeForDraw(context, &graphNode));

    // Starting a fresh render pass means all previously-recorded bindings are
    // gone, so force the VAO and texture dependencies to be re-established.
    if (!graphNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(graphNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = graphNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty || elementArrayBufferOverride != nullptr)
    {

        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(graphNode, programGL->getActiveAttribLocationsMask(),
                                      elementArrayBufferOverride, queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(graphNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    // NOTE(review): vk::GetImpl(context) should be `this` here — confirm.
    ContextVk *contextVk = vk::GetImpl(context);
    // Stream any client-memory attribute data for the drawn vertex range.
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets  = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}
272
Jamie Madilld03a8492017-10-03 15:46:06 -0400273gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
274{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500275 vk::CommandBuffer *commandBuffer = nullptr;
Luc Ferron78e39b32018-02-26 07:42:44 -0500276 ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, nullptr,
277 &commandBuffer));
Luc Ferron360098d2018-02-21 07:33:50 -0500278
279 if (mode == GL_LINE_LOOP)
280 {
Luc Ferron78e39b32018-02-26 07:42:44 -0500281 ANGLE_TRY(mLineLoopHandler.createIndexBuffer(this, first, count));
282 mLineLoopHandler.bindIndexBuffer(VK_INDEX_TYPE_UINT32, &commandBuffer);
283 ANGLE_TRY(mLineLoopHandler.draw(count, commandBuffer));
Luc Ferron360098d2018-02-21 07:33:50 -0500284 }
285 else
286 {
287 commandBuffer->draw(count, 1, first, 0);
288 }
289
Jamie Madilld03a8492017-10-03 15:46:06 -0400290 return gl::NoError();
291}
292
// Instanced array draws are not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
302
Jamie Madillc564c072017-06-01 12:45:42 -0400303gl::Error ContextVk::drawElements(const gl::Context *context,
304 GLenum mode,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400305 GLsizei count,
306 GLenum type,
Qin Jiajia1da00652017-06-20 17:16:25 +0800307 const void *indices)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400308{
Frank Henigmana53d0e12018-02-13 00:06:06 -0500309 gl::VertexArray *vao = mState.getState().getVertexArray();
310 const gl::Buffer *elementArrayBuffer = vao->getElementArrayBuffer().get();
Luc Ferron78e39b32018-02-26 07:42:44 -0500311 vk::CommandBuffer *commandBuffer = nullptr;
Jamie Madilld03a8492017-10-03 15:46:06 -0400312
Luc Ferron78e39b32018-02-26 07:42:44 -0500313 if (mode == GL_LINE_LOOP)
314 {
Frank Henigmana53d0e12018-02-13 00:06:06 -0500315 if (!elementArrayBuffer)
316 {
317 UNIMPLEMENTED();
318 return gl::InternalError() << "Line loop indices in client memory not supported";
319 }
320
321 BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
322
Luc Ferron78e39b32018-02-26 07:42:44 -0500323 ANGLE_TRY(mLineLoopHandler.createIndexBufferFromElementArrayBuffer(
324 this, elementArrayBufferVk, GetVkIndexType(type), count));
325
326 // TODO(fjhenigman): calculate the index range and pass to setupDraw()
327 ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0,
328 mLineLoopHandler.getLineLoopBufferResource(), &commandBuffer));
329
330 mLineLoopHandler.bindIndexBuffer(GetVkIndexType(type), &commandBuffer);
331 commandBuffer->drawIndexed(count + 1, 1, 0, 0, 0);
332 }
333 else
334 {
Frank Henigmana53d0e12018-02-13 00:06:06 -0500335 ContextVk *contextVk = vk::GetImpl(context);
336 const bool computeIndexRange = vk::GetImpl(vao)->attribsToStream(contextVk).any();
337 gl::IndexRange range;
338 VkBuffer buffer = VK_NULL_HANDLE;
339 VkDeviceSize offset = 0;
340
341 if (elementArrayBuffer)
342 {
Luc Ferron80964f92018-03-08 10:31:24 -0500343 if (type == GL_UNSIGNED_BYTE)
344 {
345 // TODO(fjhenigman): Index format translation.
346 UNIMPLEMENTED();
347 return gl::InternalError() << "Unsigned byte translation is not implemented for "
348 << "indices in a buffer object";
349 }
350
Frank Henigmana53d0e12018-02-13 00:06:06 -0500351 BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
352 buffer = elementArrayBufferVk->getVkBuffer().getHandle();
353 offset = 0;
354
355 if (computeIndexRange)
356 {
357 ANGLE_TRY(elementArrayBufferVk->getIndexRange(
358 context, type, 0, count, false /*primitiveRestartEnabled*/, &range));
359 }
360 }
361 else
362 {
363 const GLsizei amount = sizeof(GLushort) * count;
Luc Ferron80964f92018-03-08 10:31:24 -0500364 GLubyte *dst = nullptr;
Frank Henigmana53d0e12018-02-13 00:06:06 -0500365
366 ANGLE_TRY(mStreamingIndexData.allocate(contextVk, amount, &dst, &buffer, &offset));
Luc Ferron80964f92018-03-08 10:31:24 -0500367 if (type == GL_UNSIGNED_BYTE)
368 {
369 // Unsigned bytes don't have direct support in Vulkan so we have to expand the
370 // memory to a GLushort.
371 const GLubyte *in = static_cast<const GLubyte *>(indices);
372 GLushort *expandedDst = reinterpret_cast<GLushort *>(dst);
373 for (GLsizei index = 0; index < count; index++)
374 {
375 expandedDst[index] = static_cast<GLushort>(in[index]);
376 }
377 }
378 else
379 {
380 memcpy(dst, indices, amount);
381 }
Frank Henigmana53d0e12018-02-13 00:06:06 -0500382 ANGLE_TRY(mStreamingIndexData.flush(contextVk));
383
384 if (computeIndexRange)
385 {
386 range =
387 gl::ComputeIndexRange(type, indices, count, false /*primitiveRestartEnabled*/);
388 }
389 }
390
391 ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, range.start, range.end, nullptr,
392 &commandBuffer));
393 commandBuffer->bindIndexBuffer(buffer, offset, GetVkIndexType(type));
Luc Ferron78e39b32018-02-26 07:42:44 -0500394 commandBuffer->drawIndexed(count, 1, 0, 0, 0);
395 }
Jamie Madilld03a8492017-10-03 15:46:06 -0400396
397 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400398}
399
// Instanced indexed draws are not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
410
// NOTE(review): unlike the other unimplemented draw entry points, this one
// silently succeeds without recording any draw — confirm this no-op is
// intentional rather than a missing UNIMPLEMENTED()/forward to drawElements.
gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}
421
// Convenience accessor for the renderer's VkDevice.
VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}
426
// Indirect array draws are not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}
434
// Indirect indexed draws are not yet implemented for the Vulkan backend.
gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}
444
// Device-reset detection is not implemented; unconditionally reports
// GL_NO_ERROR after flagging the missing implementation.
GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}
450
// GL_VENDOR string — not implemented yet; returns an empty string.
std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}
456
// GL_RENDERER string: delegates to the renderer's description.
std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}
461
// Debug-marker insertion (GL_EXT_debug_marker) — not implemented yet.
void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
466
// Debug-marker group push (GL_EXT_debug_marker) — not implemented yet.
void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}
471
// Debug-marker group pop (GL_EXT_debug_marker) — not implemented yet.
void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}
476
// KHR_debug group push — not implemented yet.
void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}
481
// KHR_debug group pop — not implemented yet.
void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}
486
Jamie Madillfe548342017-06-19 11:13:24 -0400487void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400488{
Jamie Madill72106562017-03-24 14:18:50 -0400489 if (dirtyBits.any())
490 {
491 invalidateCurrentPipeline();
492 }
Jamie Madillebf72992017-10-13 14:09:45 -0400493
494 const auto &glState = context->getGLState();
495
496 // TODO(jmadill): Full dirty bits implementation.
Jamie Madill5547b382017-10-23 18:16:01 -0400497 bool dirtyTextures = false;
Jamie Madillebf72992017-10-13 14:09:45 -0400498
499 for (auto dirtyBit : dirtyBits)
500 {
501 switch (dirtyBit)
502 {
503 case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
Luc Ferron00155d52018-02-06 10:48:47 -0500504 if (glState.isScissorTestEnabled())
505 {
506 mPipelineDesc->updateScissor(glState.getScissor());
507 }
508 else
509 {
510 mPipelineDesc->updateScissor(glState.getViewport());
511 }
Jamie Madillebf72992017-10-13 14:09:45 -0400512 break;
513 case gl::State::DIRTY_BIT_SCISSOR:
Luc Ferron00155d52018-02-06 10:48:47 -0500514 // Only modify the scissor region if the test is enabled, otherwise we want to keep
515 // the viewport size as the scissor region.
516 if (glState.isScissorTestEnabled())
517 {
518 mPipelineDesc->updateScissor(glState.getScissor());
519 }
Jamie Madillebf72992017-10-13 14:09:45 -0400520 break;
521 case gl::State::DIRTY_BIT_VIEWPORT:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500522 mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
523 glState.getFarPlane());
Luc Ferron00155d52018-02-06 10:48:47 -0500524
525 // If the scissor test isn't enabled, we have to also update the scissor to
526 // be equal to the viewport to make sure we keep rendering everything in the
527 // viewport.
528 if (!glState.isScissorTestEnabled())
529 {
530 mPipelineDesc->updateScissor(glState.getViewport());
531 }
Jamie Madillebf72992017-10-13 14:09:45 -0400532 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400533 case gl::State::DIRTY_BIT_DEPTH_RANGE:
534 WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
535 break;
536 case gl::State::DIRTY_BIT_BLEND_ENABLED:
Luc Ferronf8be7562018-02-06 15:59:11 -0500537 mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
Jamie Madillebf72992017-10-13 14:09:45 -0400538 break;
539 case gl::State::DIRTY_BIT_BLEND_COLOR:
Luc Ferronf8be7562018-02-06 15:59:11 -0500540 mPipelineDesc->updateBlendColor(glState.getBlendColor());
Jamie Madillebf72992017-10-13 14:09:45 -0400541 break;
542 case gl::State::DIRTY_BIT_BLEND_FUNCS:
Luc Ferronf8be7562018-02-06 15:59:11 -0500543 mPipelineDesc->updateBlendFuncs(glState.getBlendState());
Jamie Madillebf72992017-10-13 14:09:45 -0400544 break;
545 case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
Luc Ferronf8be7562018-02-06 15:59:11 -0500546 mPipelineDesc->updateBlendEquations(glState.getBlendState());
Jamie Madillebf72992017-10-13 14:09:45 -0400547 break;
548 case gl::State::DIRTY_BIT_COLOR_MASK:
549 WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
550 break;
551 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
552 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
553 break;
554 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
555 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
556 break;
557 case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
558 WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
559 break;
560 case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
561 WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
562 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400563 case gl::State::DIRTY_BIT_SAMPLE_MASK:
564 WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400565 break;
566 case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
567 WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
568 break;
569 case gl::State::DIRTY_BIT_DEPTH_FUNC:
570 WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
571 break;
572 case gl::State::DIRTY_BIT_DEPTH_MASK:
573 WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
574 break;
575 case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
576 WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
577 break;
578 case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
579 WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
580 break;
581 case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
582 WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
583 break;
584 case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
585 WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
586 break;
587 case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
588 WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
589 break;
590 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
591 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
592 break;
593 case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
594 WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
595 break;
596 case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
597 case gl::State::DIRTY_BIT_CULL_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500598 mPipelineDesc->updateCullMode(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400599 break;
600 case gl::State::DIRTY_BIT_FRONT_FACE:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500601 mPipelineDesc->updateFrontFace(glState.getRasterizerState());
Jamie Madillebf72992017-10-13 14:09:45 -0400602 break;
603 case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
604 WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
605 break;
606 case gl::State::DIRTY_BIT_POLYGON_OFFSET:
607 WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
608 break;
609 case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
610 WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
611 break;
612 case gl::State::DIRTY_BIT_LINE_WIDTH:
Jamie Madillf2f6d372018-01-10 21:37:23 -0500613 mPipelineDesc->updateLineWidth(glState.getLineWidth());
Jamie Madillebf72992017-10-13 14:09:45 -0400614 break;
615 case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
616 WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
617 break;
618 case gl::State::DIRTY_BIT_CLEAR_COLOR:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500619 mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
620 mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
621 mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
622 mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
Jamie Madillebf72992017-10-13 14:09:45 -0400623 break;
624 case gl::State::DIRTY_BIT_CLEAR_DEPTH:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500625 mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
Jamie Madillebf72992017-10-13 14:09:45 -0400626 break;
627 case gl::State::DIRTY_BIT_CLEAR_STENCIL:
Jamie Madillf4d693c2018-02-14 16:38:16 -0500628 mClearDepthStencilValue.depthStencil.stencil =
629 static_cast<uint32_t>(glState.getStencilClearValue());
Jamie Madillebf72992017-10-13 14:09:45 -0400630 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400631 case gl::State::DIRTY_BIT_UNPACK_STATE:
632 WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400633 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500634 case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
635 WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
636 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400637 case gl::State::DIRTY_BIT_PACK_STATE:
638 WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
Jamie Madillebf72992017-10-13 14:09:45 -0400639 break;
Corentin Wallez29a20992017-11-06 18:23:16 -0500640 case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
641 WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
642 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400643 case gl::State::DIRTY_BIT_DITHER_ENABLED:
644 WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
645 break;
646 case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
647 WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
648 break;
649 case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
650 WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
651 break;
652 case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
653 WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
654 break;
655 case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
656 WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
657 break;
658 case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
659 WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
660 break;
661 case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
Jamie Madill49ac74b2017-12-21 14:42:33 -0500662 mVertexArrayDirty = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400663 break;
664 case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
665 WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
666 break;
Qin Jiajiaa98a2812017-11-30 18:12:06 +0800667 case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
668 WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
669 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400670 case gl::State::DIRTY_BIT_PROGRAM_BINDING:
671 WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
672 break;
673 case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
674 {
Jamie Madillf2f6d372018-01-10 21:37:23 -0500675 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
676 mPipelineDesc->updateShaders(programVk);
Jamie Madill5547b382017-10-23 18:16:01 -0400677 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400678 break;
679 }
680 case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400681 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400682 break;
683 case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
Jamie Madill5547b382017-10-23 18:16:01 -0400684 dirtyTextures = true;
Jamie Madillebf72992017-10-13 14:09:45 -0400685 break;
Geoff Langded79232017-11-28 15:21:11 -0500686 case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
687 WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
688 break;
Xinghua Cao10a4d432017-11-28 14:46:26 +0800689 case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
690 WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
691 break;
Jamie Madillf4141212017-12-12 15:08:07 -0500692 case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
693 WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
694 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400695 case gl::State::DIRTY_BIT_MULTISAMPLING:
696 WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
697 break;
698 case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
699 WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
700 break;
701 case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
702 WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
703 break;
704 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
705 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
706 break;
707 case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
708 WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
709 break;
710 case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
711 WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
712 break;
713 case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
714 WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
715 break;
Jamie Madillc67323a2017-11-02 23:11:41 -0400716 case gl::State::DIRTY_BIT_CURRENT_VALUES:
717 WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
718 break;
Jamie Madillebf72992017-10-13 14:09:45 -0400719 default:
Jamie Madillc67323a2017-11-02 23:11:41 -0400720 UNREACHABLE();
Jamie Madillebf72992017-10-13 14:09:45 -0400721 break;
722 }
723 }
Jamie Madill5547b382017-10-23 18:16:01 -0400724
725 if (dirtyTextures)
726 {
Jamie Madille1f3ad42017-10-28 23:00:42 -0400727 ProgramVk *programVk = vk::GetImpl(glState.getProgram());
Jamie Madill5547b382017-10-23 18:16:01 -0400728 programVk->invalidateTextures();
Jamie Madill49ac74b2017-12-21 14:42:33 -0500729 mTexturesDirty = true;
Jamie Madill5547b382017-10-23 18:16:01 -0400730 }
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400731}
732
733GLint ContextVk::getGPUDisjoint()
734{
735 UNIMPLEMENTED();
736 return GLint();
737}
738
739GLint64 ContextVk::getTimestamp()
740{
741 UNIMPLEMENTED();
742 return GLint64();
743}
744
void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
    // Intentionally empty: nothing currently needs to happen when this
    // context becomes current.
}
748
// Native capability queries are owned by the renderer; forward to it.
const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}
753
// Per-format texture capabilities are owned by the renderer; forward to it.
const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}
758
// The supported GL extension set is owned by the renderer; forward to it.
const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}
763
// Backend-specific limitations are owned by the renderer; forward to it.
const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}
768
769CompilerImpl *ContextVk::createCompiler()
770{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400771 return new CompilerVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400772}
773
Jamie Madillacccc6c2016-05-03 17:22:10 -0400774ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400775{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400776 return new ShaderVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400777}
778
Jamie Madillacccc6c2016-05-03 17:22:10 -0400779ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400780{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400781 return new ProgramVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400782}
783
Jamie Madillacccc6c2016-05-03 17:22:10 -0400784FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400785{
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500786 return FramebufferVk::CreateUserFBO(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400787}
788
789TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
790{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400791 return new TextureVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400792}
793
Jamie Madille703c602018-02-20 10:21:48 -0500794RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400795{
Jamie Madille703c602018-02-20 10:21:48 -0500796 return new RenderbufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400797}
798
Jamie Madill8f775602016-11-03 16:45:34 -0400799BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400800{
Jamie Madill8f775602016-11-03 16:45:34 -0400801 return new BufferVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400802}
803
Jamie Madillacccc6c2016-05-03 17:22:10 -0400804VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400805{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400806 return new VertexArrayVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400807}
808
809QueryImpl *ContextVk::createQuery(GLenum type)
810{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400811 return new QueryVk(type);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400812}
813
814FenceNVImpl *ContextVk::createFenceNV()
815{
Jamie Madillacccc6c2016-05-03 17:22:10 -0400816 return new FenceNVVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400817}
818
Jamie Madill70b5bb02017-08-28 13:32:37 -0400819SyncImpl *ContextVk::createSync()
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400820{
Jamie Madill70b5bb02017-08-28 13:32:37 -0400821 return new SyncVk();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400822}
823
Geoff Lang73bd2182016-07-15 13:01:24 -0400824TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400825{
Geoff Lang73bd2182016-07-15 13:01:24 -0400826 return new TransformFeedbackVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400827}
828
Jamie Madill06ef36b2017-09-09 23:32:46 -0400829SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400830{
Jamie Madill06ef36b2017-09-09 23:32:46 -0400831 return new SamplerVk(state);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400832}
833
Yunchao Hea336b902017-08-02 16:05:21 +0800834ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
835{
836 return new ProgramPipelineVk(state);
837}
838
Sami Väisänene45e53b2016-05-25 10:36:04 +0300839std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
840{
841 return std::vector<PathImpl *>();
842}
843
void ContextVk::invalidateCurrentPipeline()
{
    // Drop the cached pipeline pointer; a new one must be established
    // before it is next used.
    mCurrentPipeline = nullptr;
}
848
Jamie Madill49ac74b2017-12-21 14:42:33 -0500849void ContextVk::onVertexArrayChange()
850{
851 // TODO(jmadill): Does not handle dependent state changes.
852 mVertexArrayDirty = true;
853 invalidateCurrentPipeline();
854}
855
// Compute dispatch is not implemented in the Vulkan backend yet.
gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
864
// Indirect compute dispatch is not implemented in the Vulkan backend yet.
gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
870
// glMemoryBarrier is not implemented in the Vulkan backend yet.
gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
876
// glMemoryBarrierByRegion is not implemented in the Vulkan backend yet.
gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
882
// Accessor for the context-owned Vulkan descriptor pool.
vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}
887
// Accessor for the cached clear value used for color clears.
const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}
892
// Accessor for the cached clear value used for depth/stencil clears.
const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}
897
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400898} // namespace rx