//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{

namespace
{

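// Maps a GL index type to the corresponding Vulkan index type. Core Vulkan has no 8-bit index
// type, so GL_UNSIGNED_BYTE indices are mapped to 16-bit and the index data is expanded to
// GLushort by the caller (see the client-memory path in ContextVk::drawElements below).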
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_BYTE:
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

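// Indices into the pool size array passed to the descriptor pool created in
// ContextVk::initialize(): one entry for uniform buffers, one for combined image samplers
// (textures).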
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool = 1,
};

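// Sizes of the dynamic buffers used to stream client-side vertex and index data into
// device-visible memory for draw calls.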
constexpr size_t kStreamingVertexDataSize = 1024 * 1024;
constexpr size_t kStreamingIndexDataSize = 1024 * 8;

}  // anonymous namespace

ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state),
      mRenderer(renderer),
      mCurrentDrawMode(GL_NONE),
      mVertexArrayDirty(false),
      mTexturesDirty(false),
      mStreamingVertexData(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, kStreamingVertexDataSize),
      mStreamingIndexData(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, kStreamingIndexDataSize)
{
    memset(&mClearColorValue, 0, sizeof(mClearColorValue));
    memset(&mClearDepthStencilValue, 0, sizeof(mClearDepthStencilValue));
}

ContextVk::~ContextVk()
{
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
    mStreamingVertexData.destroy(device);
    mStreamingIndexData.destroy(device);
    mLineLoopHandler.destroy(device);
}

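// Creates the context-wide descriptor pool (with separate pool sizes for uniform buffers and
// combined image samplers) and the default pipeline description. The pool sizes and maxSets are
// placeholders until a non-arbitrary maximum is chosen (see the TODO below).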
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    mPipelineDesc.reset(new vk::PipelineDesc());
    mPipelineDesc->initDefaults();

    return gl::NoError();
}

gl::Error ContextVk::flush(const gl::Context *context)
{
    // TODO(jmadill): Flush will need to insert a semaphore for the next flush to wait on.
    UNIMPLEMENTED();

    // dEQP tests rely on no errors being generated at the end of a test, and they always call
    // flush at the end of their tests. Returning NoError here until flush is implemented lets us
    // enable many tests in the meantime.
    return gl::NoError();
}

gl::Error ContextVk::finish(const gl::Context *context)
{
    return mRenderer->finish(context);
}

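// Obtains a graphics pipeline from the renderer that matches the current GL state: primitive
// topology, packed vertex input descriptions, render pass description, and the bound program.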
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline);

    const gl::State &state = mState.getState();
    VertexArrayVk *vertexArrayVk = vk::GetImpl(state.getVertexArray());
    FramebufferVk *framebufferVk = vk::GetImpl(state.getDrawFramebuffer());
    ProgramVk *programVk = vk::GetImpl(state.getProgram());
    const gl::AttributesMask activeAttribLocationsMask =
        state.getProgram()->getActiveAttribLocationsMask();

    // Ensure the topology of the pipeline description is updated.
    mPipelineDesc->updateTopology(mCurrentDrawMode);

    // Copy over the latest attrib and binding descriptions.
    vertexArrayVk->getPackedInputDescriptions(mPipelineDesc.get());

    // Ensure that the RenderPass description is updated.
    mPipelineDesc->updateRenderPassDesc(framebufferVk->getRenderPassDesc(context));

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    ANGLE_TRY(mRenderer->getPipeline(programVk, *mPipelineDesc, activeAttribLocationsMask,
                                     &mCurrentPipeline));

    return gl::NoError();
}

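// Shared setup for all draw calls: ensures a pipeline matching the current draw mode exists,
// starts (or reuses) the render pass command buffer for the draw framebuffer, records pending
// vertex array and texture dependencies, streams client-side vertex data, and binds the
// pipeline, vertex buffers and descriptor sets.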
gl::Error ContextVk::setupDraw(const gl::Context *context,
                               GLenum mode,
                               DrawType drawType,
                               size_t firstVertex,
                               size_t lastVertex,
                               ResourceVk *elementArrayBufferOverride,
                               vk::CommandBuffer **commandBuffer)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline)
    {
        ANGLE_TRY(initPipeline(context));
    }

    const auto &state = mState.getState();
    const gl::Program *programGL = state.getProgram();
    ProgramVk *programVk = vk::GetImpl(programGL);
    const gl::VertexArray *vao = state.getVertexArray();
    VertexArrayVk *vkVAO = vk::GetImpl(vao);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = vk::GetImpl(drawFBO);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib = programGL->getState().getMaxActiveAttribLocation();

    vk::CommandGraphNode *graphNode = nullptr;
    ANGLE_TRY(vkFBO->getCommandGraphNodeForDraw(context, &graphNode));

    if (!graphNode->getInsideRenderPassCommands()->valid())
    {
        mVertexArrayDirty = true;
        mTexturesDirty = true;
        ANGLE_TRY(graphNode->beginInsideRenderPassRecording(mRenderer, commandBuffer));
    }
    else
    {
        *commandBuffer = graphNode->getInsideRenderPassCommands();
    }

    // Ensure any writes to the VAO buffers are flushed before we read from them.
    if (mVertexArrayDirty || elementArrayBufferOverride != nullptr)
    {
        mVertexArrayDirty = false;
        vkVAO->updateDrawDependencies(graphNode, programGL->getActiveAttribLocationsMask(),
                                      elementArrayBufferOverride, queueSerial, drawType);
    }

    // Ensure any writes to the textures are flushed before we read from them.
    if (mTexturesDirty)
    {
        mTexturesDirty = false;
        // TODO(jmadill): Should probably merge this for loop with programVk's descriptor update.
        const auto &completeTextures = state.getCompleteTextureCache();
        for (const gl::SamplerBinding &samplerBinding : programGL->getSamplerBindings())
        {
            ASSERT(!samplerBinding.unreferenced);

            // TODO(jmadill): Sampler arrays
            ASSERT(samplerBinding.boundTextureUnits.size() == 1);

            GLuint textureUnit = samplerBinding.boundTextureUnits[0];
            const gl::Texture *texture = completeTextures[textureUnit];

            // TODO(jmadill): Incomplete textures handling.
            ASSERT(texture);

            TextureVk *textureVk = vk::GetImpl(texture);
            textureVk->onReadResource(graphNode, mRenderer->getCurrentQueueSerial());
        }
    }

    (*commandBuffer)->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline->get());
    ContextVk *contextVk = vk::GetImpl(context);
    ANGLE_TRY(vkVAO->streamVertexData(contextVk, &mStreamingVertexData, firstVertex, lastVertex));
    (*commandBuffer)
        ->bindVertexBuffers(0, maxAttrib, vkVAO->getCurrentArrayBufferHandles().data(),
                            vkVAO->getCurrentArrayBufferOffsets().data());

    // Update the queue serial for the pipeline object.
    ASSERT(mCurrentPipeline && mCurrentPipeline->valid());
    mCurrentPipeline->updateSerial(queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ANGLE_TRY(programVk->updateUniforms(this));
    programVk->updateTexturesDescriptorSet(this);

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    const auto &descriptorSets = programVk->getDescriptorSets();
    const gl::RangeUI &usedRange = programVk->getUsedDescriptorSetRange();
    if (!usedRange.empty())
    {
        ASSERT(!descriptorSets.empty());
        const vk::PipelineLayout &pipelineLayout = mRenderer->getGraphicsPipelineLayout();
        (*commandBuffer)
            ->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, usedRange.low(),
                                 usedRange.length(), &descriptorSets[usedRange.low()], 0, nullptr);
    }

    return gl::NoError();
}

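// Note: GL_LINE_LOOP has no direct Vulkan equivalent. Line loop draws are emulated through
// mLineLoopHandler, which builds an index buffer covering the loop and issues an indexed draw
// instead of a plain array draw.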
gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(setupDraw(context, mode, DrawType::Arrays, first, first + count - 1, nullptr,
                        &commandBuffer));

    if (mode == GL_LINE_LOOP)
    {
        ANGLE_TRY(mLineLoopHandler.createIndexBuffer(this, first, count));
        mLineLoopHandler.bindIndexBuffer(VK_INDEX_TYPE_UINT32, &commandBuffer);
        ANGLE_TRY(mLineLoopHandler.draw(count, commandBuffer));
    }
    else
    {
        commandBuffer->draw(count, 1, first, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

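// Indexed draws take three paths: line loops go through mLineLoopHandler, indices stored in a
// buffer object are bound directly (GL_UNSIGNED_BYTE translation for that case is still a TODO),
// and client-memory indices are streamed into mStreamingIndexData, expanding byte indices to
// 16 bits since core Vulkan has no 8-bit index type.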
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    gl::VertexArray *vao = mState.getState().getVertexArray();
    const gl::Buffer *elementArrayBuffer = vao->getElementArrayBuffer().get();
    vk::CommandBuffer *commandBuffer = nullptr;

    if (mode == GL_LINE_LOOP)
    {
        if (!elementArrayBuffer)
        {
            UNIMPLEMENTED();
            return gl::InternalError() << "Line loop indices in client memory not supported";
        }

        BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);

        ANGLE_TRY(mLineLoopHandler.createIndexBufferFromElementArrayBuffer(
            this, elementArrayBufferVk, GetVkIndexType(type), count));

        // TODO(fjhenigman): calculate the index range and pass to setupDraw()
        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, 0, 0,
                            mLineLoopHandler.getLineLoopBufferResource(), &commandBuffer));

        mLineLoopHandler.bindIndexBuffer(GetVkIndexType(type), &commandBuffer);
        commandBuffer->drawIndexed(count + 1, 1, 0, 0, 0);
    }
    else
    {
        ContextVk *contextVk = vk::GetImpl(context);
        const bool computeIndexRange = vk::GetImpl(vao)->attribsToStream(contextVk).any();
        gl::IndexRange range;
        VkBuffer buffer = VK_NULL_HANDLE;
        VkDeviceSize offset = 0;

        if (elementArrayBuffer)
        {
            if (type == GL_UNSIGNED_BYTE)
            {
                // TODO(fjhenigman): Index format translation.
                UNIMPLEMENTED();
                return gl::InternalError() << "Unsigned byte translation is not implemented for "
                                           << "indices in a buffer object";
            }

            BufferVk *elementArrayBufferVk = vk::GetImpl(elementArrayBuffer);
            buffer = elementArrayBufferVk->getVkBuffer().getHandle();
            offset = 0;

            if (computeIndexRange)
            {
                ANGLE_TRY(elementArrayBufferVk->getIndexRange(
                    context, type, 0, count, false /*primitiveRestartEnabled*/, &range));
            }
        }
        else
        {
            const GLsizei amount = sizeof(GLushort) * count;
            GLubyte *dst = nullptr;

            ANGLE_TRY(mStreamingIndexData.allocate(contextVk, amount, &dst, &buffer, &offset));
            if (type == GL_UNSIGNED_BYTE)
            {
                // Unsigned bytes don't have direct support in Vulkan so we have to expand the
                // memory to a GLushort.
                const GLubyte *in = static_cast<const GLubyte *>(indices);
                GLushort *expandedDst = reinterpret_cast<GLushort *>(dst);
                for (GLsizei index = 0; index < count; index++)
                {
                    expandedDst[index] = static_cast<GLushort>(in[index]);
                }
            }
            else
            {
                memcpy(dst, indices, amount);
            }
            ANGLE_TRY(mStreamingIndexData.flush(contextVk));

            if (computeIndexRange)
            {
                range =
                    gl::ComputeIndexRange(type, indices, count, false /*primitiveRestartEnabled*/);
            }
        }

        ANGLE_TRY(setupDraw(context, mode, DrawType::Elements, range.start, range.end, nullptr,
                            &commandBuffer));
        commandBuffer->bindIndexBuffer(buffer, offset, GetVkIndexType(type));
        commandBuffer->drawIndexed(count, 1, 0, 0, 0);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "DrawArraysIndirect hasn't been implemented for vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

void ContextVk::pushDebugGroup(GLenum source, GLuint id, GLsizei length, const char *message)
{
    UNIMPLEMENTED();
}

void ContextVk::popDebugGroup()
{
    UNIMPLEMENTED();
}

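// Translates GL state-change dirty bits into updates of the pipeline description, clear values,
// and the vertex-array/texture dirty flags consumed by setupDraw(). Most bits are still
// unimplemented and only emit warnings.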
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.
    bool dirtyTextures = false;

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                else
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                // Only modify the scissor region if the test is enabled, otherwise we want to keep
                // the viewport size as the scissor region.
                if (glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getScissor());
                }
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
                mPipelineDesc->updateViewport(glState.getViewport(), glState.getNearPlane(),
                                              glState.getFarPlane());

                // If the scissor test isn't enabled, we have to also update the scissor to
                // be equal to the viewport to make sure we keep rendering everything in the
                // viewport.
                if (!glState.isScissorTestEnabled())
                {
                    mPipelineDesc->updateScissor(glState.getViewport());
                }
                break;
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                mPipelineDesc->updateBlendEnabled(glState.isBlendEnabled());
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                mPipelineDesc->updateBlendColor(glState.getBlendColor());
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                mPipelineDesc->updateBlendFuncs(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                mPipelineDesc->updateBlendEquations(glState.getBlendState());
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK:
                WARN() << "DIRTY_BIT_SAMPLE_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                mPipelineDesc->updateDepthTestEnabled(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                mPipelineDesc->updateDepthFunc(glState.getDepthStencilState());
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mPipelineDesc->updateCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mPipelineDesc->updateFrontFace(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mPipelineDesc->updateLineWidth(glState.getLineWidth());
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                mClearColorValue.color.float32[0] = glState.getColorClearValue().red;
                mClearColorValue.color.float32[1] = glState.getColorClearValue().green;
                mClearColorValue.color.float32[2] = glState.getColorClearValue().blue;
                mClearColorValue.color.float32[3] = glState.getColorClearValue().alpha;
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                mClearDepthStencilValue.depthStencil.depth = glState.getDepthClearValue();
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                mClearDepthStencilValue.depthStencil.stencil =
                    static_cast<uint32_t>(glState.getStencilClearValue());
                break;
            case gl::State::DIRTY_BIT_UNPACK_STATE:
                WARN() << "DIRTY_BIT_UNPACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_STATE:
                WARN() << "DIRTY_BIT_PACK_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                mVertexArrayDirty = true;
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DISPATCH_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                ProgramVk *programVk = vk::GetImpl(glState.getProgram());
                mPipelineDesc->updateShaders(programVk);
                dirtyTextures = true;
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                dirtyTextures = true;
                break;
            case gl::State::DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING:
                WARN() << "DIRTY_BIT_TRANSFORM_FEEDBACK_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_SHADER_STORAGE_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNIFORM_BUFFER_BINDINGS:
                WARN() << "DIRTY_BIT_UNIFORM_BUFFER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            case gl::State::DIRTY_BIT_CURRENT_VALUES:
                WARN() << "DIRTY_BIT_CURRENT_VALUES unimplemented";
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    if (dirtyTextures)
    {
        ProgramVk *programVk = vk::GetImpl(glState.getProgram());
        programVk->invalidateTextures();
        mTexturesDirty = true;
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer(const gl::RenderbufferState &state)
{
    return new RenderbufferVk(state);
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

void ContextVk::invalidateCurrentPipeline()
{
    mCurrentPipeline = nullptr;
}

void ContextVk::onVertexArrayChange()
{
    // TODO(jmadill): Does not handle dependent state changes.
    mVertexArrayDirty = true;
    invalidateCurrentPipeline();
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::dispatchComputeIndirect(const gl::Context *context, GLintptr indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrier(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::memoryBarrierByRegion(const gl::Context *context, GLbitfield barriers)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

const VkClearValue &ContextVk::getClearColorValue() const
{
    return mClearColorValue;
}

const VkClearValue &ContextVk::getClearDepthStencilValue() const
{
    return mClearDepthStencilValue;
}

}  // namespace rx