//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{

namespace
{

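// Maps a GL element index type onto the corresponding Vulkan index type. GL_UNSIGNED_BYTE has no
// direct Vulkan equivalent, so byte indices are rejected earlier (see ContextVk::drawElements).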
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

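// Indices into the descriptor pool size array set up in ContextVk::initialize(): one entry for
// uniform buffers and one for combined image samplers.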
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool       = 1,
};

}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, 1024 * 1024)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
    mStreamingVertexData.destroy(device);
    mLineLoopHandler.destroy(device);
}

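// Creates the context-owned descriptor pool (sized for uniform buffers and textures) and the
// default pipeline description used when building graphics pipelines.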
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type                  = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount       = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes    = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

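// Asks the renderer for a graphics pipeline matching the current draw mode, vertex input state,
// render pass and program. Called lazily from setupDraw whenever the cached pipeline has been
// invalidated.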
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state       = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk         = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

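// Shared setup for the draw entry points: ensures a valid pipeline and render pass, flushes
// pending writes to vertex buffers and textures, binds the pipeline, vertex buffers and
// descriptor sets, and returns the command buffer the caller records the draw into.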
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               int firstVertex,
                               int lastVertex,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state            = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk         = vk::GetImpl(programGL);
    const gl::VertexArray *vao   = state.getVertexArray();
    VertexArrayVk *vkVAO         = vk::GetImpl(vao);
    const auto *drawFBO          = state.getDrawFramebuffer();
    FramebufferVk *vkFBO         = vk::GetImpl(drawFBO);
    Serial queueSerial           = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib           = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *renderNode = nullptr;
    ANGLE_TRY(vkFBO->getRenderNode(context, &renderNode));

    if (!renderNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty    = true;
        ANGLE_TRY(renderNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = renderNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(renderNode, programGL->getActiveAttribLocationsMask(),
                                      queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit         = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(renderNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets   = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

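// GL_LINE_LOOP has no direct Vulkan primitive topology, so line loop draws are routed through the
// line loop handler; all other modes record a plain draw.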
gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, &commandBuffer));

    if (mode == GL_LINE_LOOP)
    {
        ANGLE_TRY(mLineLoopHandler.draw(this, first, count, commandBuffer));
    }
    else
    {
        commandBuffer->draw(count, 1, first, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

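// Indexed draws currently require a bound element array buffer used with a zero offset, and
// GL_UNSIGNED_BYTE indices are not yet translated to a Vulkan-supported index type.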
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    // TODO(fjhenigman): calculate the index range and pass to setupDraw()
    ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0, &commandBuffer));

    if (indices)
    {
        // TODO(jmadill): Buffer offsets and immediate data.
        UNIMPLEMENTED();
        return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
    }

    if (type == GL_UNSIGNED_BYTE)
    {
        // TODO(jmadill): Index translation.
        UNIMPLEMENTED();
        return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
    }

    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

    commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
    commandBuffer->drawIndexed(count, 1, 0, 0, 0);

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawArraysIndirect hasn't been implemented for the Vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for the Vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

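// Translates GL state-change dirty bits into updates to the pipeline description, cached clear
// values and internal dirty flags. Many bits are still unimplemented and only emit warnings.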
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

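// Dropping the current pipeline forces initPipeline() to run again on the next draw call.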
void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

}  // namespace rx