//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{

namespace
{

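// Maps a GL index type to the corresponding VkIndexType. GL_UNSIGNED_BYTE has no
// direct Vulkan equivalent and is rejected earlier, in drawElements.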
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

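// Indices into the pool size array used when creating the descriptor pool in
// ContextVk::initialize: one entry for uniform buffers, one for combined image
// samplers.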
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool = 1,
};

}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

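// Releases the Vulkan resources owned directly by the context; currently that is
// only the descriptor pool created in initialize().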
void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
}

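// Creates the descriptor pool backing the uniform-buffer and texture descriptor
// sets and sets up a default pipeline description. The pool sizes below are
// arbitrary placeholders (see the TODO).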
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

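// Fills in the pipeline description from the current GL state (topology, vertex
// input, render pass, program) and asks the renderer for a matching pipeline.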
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

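// Common per-draw setup: ensures a pipeline exists for the current draw mode,
// starts or reuses the framebuffer's render pass command buffer, updates vertex
// array and texture read dependencies when dirty, and binds the pipeline, vertex
// buffers and descriptor sets.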
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk = vk::GetImpl(programGL);
    const gl::VertexArray *vao = state.getVertexArray();
    VertexArrayVk *vkVAO = vk::GetImpl(vao);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = vk::GetImpl(drawFBO);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib = programGL->getState().getMaxActiveAttribLocation();

    // Process vertex attributes. Assume zero offsets for now.
    // TODO(jmadill): Offset handling.
    const auto &vertexHandles = vkVAO->getCurrentArrayBufferHandles();
    angle::MemoryBuffer *zeroBuf = nullptr;
    ANGLE_TRY(context->getZeroFilledBuffer(maxAttrib * sizeof(VkDeviceSize), &zeroBuf));

    // TODO(jmadill): Need to link up the TextureVk to the Secondary CB.
    vk::CommandBufferNode *renderNode = nullptr;
    ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));

    if (!renderNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty = true;
        ANGLE_TRY(renderNode->startRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = renderNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
                                      queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(renderNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vertexHandles.data(),
                            reinterpret_cast<const VkDeviceSize *>(zeroBuf->data()));

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, &commandBuffer));
    commandBuffer->draw(count, 1, first, 0);
    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

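// Indexed draw path. This currently requires a bound element array buffer, a zero
// index offset and a 16- or 32-bit index type; the other cases below are still
// unimplemented.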
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    vk::CommandBuffer *commandBuffer;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, &commandBuffer));

    if (indices)
    {
        // TODO(jmadill): Buffer offsets and immediate data.
        UNIMPLEMENTED();
        return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
    }

    if (type == GL_UNSIGNED_BYTE)
    {
        // TODO(jmadill): Index translation.
        UNIMPLEMENTED();
        return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
    }

    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

    commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
    commandBuffer->drawIndexed(count, 1, 0, 0, 0);

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

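// Translates GL state dirty bits into updates on the cached pipeline description,
// clear values and dirty flags. Any dirty bit invalidates the current pipeline;
// many bits are still unimplemented and only emit warnings.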
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

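// Factory methods for the Vulkan-backend implementations of the GL objects owned
// by this context.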
CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer()
{
    return new RenderbufferVk();
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

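// Accessors for the clear values cached from the GL clear state in syncState.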
const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

}  // namespace rx