//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// ContextVk.cpp:
//    Implements the class methods for ContextVk.
//

#include "libANGLE/renderer/vulkan/ContextVk.h"

#include "common/bitset_utils.h"
#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Program.h"
#include "libANGLE/renderer/vulkan/BufferVk.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DeviceVk.h"
#include "libANGLE/renderer/vulkan/FenceNVVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ImageVk.h"
#include "libANGLE/renderer/vulkan/ProgramPipelineVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/QueryVk.h"
#include "libANGLE/renderer/vulkan/RenderbufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/SamplerVk.h"
#include "libANGLE/renderer/vulkan/ShaderVk.h"
#include "libANGLE/renderer/vulkan/SyncVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/TransformFeedbackVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/formatutilsvk.h"

namespace rx
{

namespace
{

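// Maps a GL index type to the equivalent Vulkan index type. GL_UNSIGNED_BYTE has no Vulkan
// equivalent; 8-bit indices are rejected earlier (see ContextVk::drawElements) and would need
// translation before reaching this helper.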
VkIndexType GetVkIndexType(GLenum glIndexType)
{
    switch (glIndexType)
    {
        case GL_UNSIGNED_SHORT:
            return VK_INDEX_TYPE_UINT16;
        case GL_UNSIGNED_INT:
            return VK_INDEX_TYPE_UINT32;
        default:
            UNREACHABLE();
            return VK_INDEX_TYPE_MAX_ENUM;
    }
}

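// Indices into the VkDescriptorPoolSize array built in ContextVk::initialize(): one pool size
// for uniform buffers and one for textures (combined image samplers).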
enum DescriptorPoolIndex : uint8_t
{
    UniformBufferPool = 0,
    TexturePool       = 1,
};

}  // anonymous namespace

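// The constructor pre-populates the cached Vulkan pipeline create-info structures with default
// values. Fields that depend on the current GL state (shader modules, vertex input descriptions,
// primitive topology, render pass and pipeline layout) are overwritten at draw time in
// initPipeline() before the pipeline is created.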
ContextVk::ContextVk(const gl::ContextState &state, RendererVk *renderer)
    : ContextImpl(state), mRenderer(renderer), mCurrentDrawMode(GL_NONE)
{
    // The module handle is filled out at draw time.
    mCurrentShaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    mCurrentShaderStages[0].pNext = nullptr;
    mCurrentShaderStages[0].flags = 0;
    mCurrentShaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
    mCurrentShaderStages[0].module = VK_NULL_HANDLE;
    mCurrentShaderStages[0].pName = "main";
    mCurrentShaderStages[0].pSpecializationInfo = nullptr;

    mCurrentShaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    mCurrentShaderStages[1].pNext = nullptr;
    mCurrentShaderStages[1].flags = 0;
    mCurrentShaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    mCurrentShaderStages[1].module = VK_NULL_HANDLE;
    mCurrentShaderStages[1].pName = "main";
    mCurrentShaderStages[1].pSpecializationInfo = nullptr;

    // The binding descriptions are filled in at draw time.
    mCurrentVertexInputState.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    mCurrentVertexInputState.pNext = nullptr;
    mCurrentVertexInputState.flags = 0;
    mCurrentVertexInputState.vertexBindingDescriptionCount = 0;
    mCurrentVertexInputState.pVertexBindingDescriptions = nullptr;
    mCurrentVertexInputState.vertexAttributeDescriptionCount = 0;
    mCurrentVertexInputState.pVertexAttributeDescriptions = nullptr;

    // Primitive topology is filled in at draw time.
    mCurrentInputAssemblyState.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    mCurrentInputAssemblyState.pNext = nullptr;
    mCurrentInputAssemblyState.flags = 0;
    mCurrentInputAssemblyState.topology = gl_vk::GetPrimitiveTopology(mCurrentDrawMode);
    mCurrentInputAssemblyState.primitiveRestartEnable = VK_FALSE;

    // Set initial viewport and scissor state.
    mCurrentViewportVk.x = 0.0f;
    mCurrentViewportVk.y = 0.0f;
    mCurrentViewportVk.width = 0.0f;
    mCurrentViewportVk.height = 0.0f;
    mCurrentViewportVk.minDepth = 0.0f;
    mCurrentViewportVk.maxDepth = 1.0f;

    mCurrentScissorVk.offset.x = 0;
    mCurrentScissorVk.offset.y = 0;
    mCurrentScissorVk.extent.width = 0u;
    mCurrentScissorVk.extent.height = 0u;

    mCurrentViewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    mCurrentViewportState.pNext = nullptr;
    mCurrentViewportState.flags = 0;
    mCurrentViewportState.viewportCount = 1;
    mCurrentViewportState.pViewports = &mCurrentViewportVk;
    mCurrentViewportState.scissorCount = 1;
    mCurrentViewportState.pScissors = &mCurrentScissorVk;

    // Set initial rasterizer state.
    // TODO(jmadill): Extra rasterizer state features.
    mCurrentRasterState.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    mCurrentRasterState.pNext = nullptr;
    mCurrentRasterState.flags = 0;
    mCurrentRasterState.depthClampEnable = VK_FALSE;
    mCurrentRasterState.rasterizerDiscardEnable = VK_FALSE;
    mCurrentRasterState.polygonMode = VK_POLYGON_MODE_FILL;
    mCurrentRasterState.cullMode = VK_CULL_MODE_NONE;
    mCurrentRasterState.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
    mCurrentRasterState.depthBiasEnable = VK_FALSE;
    mCurrentRasterState.depthBiasConstantFactor = 0.0f;
    mCurrentRasterState.depthBiasClamp = 0.0f;
    mCurrentRasterState.depthBiasSlopeFactor = 0.0f;
    mCurrentRasterState.lineWidth = 1.0f;

    // Initialize a dummy multisample state.
    // TODO(jmadill): Multisample state.
    mCurrentMultisampleState.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    mCurrentMultisampleState.pNext = nullptr;
    mCurrentMultisampleState.flags = 0;
    mCurrentMultisampleState.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    mCurrentMultisampleState.sampleShadingEnable = VK_FALSE;
    mCurrentMultisampleState.minSampleShading = 0.0f;
    mCurrentMultisampleState.pSampleMask = nullptr;
    mCurrentMultisampleState.alphaToCoverageEnable = VK_FALSE;
    mCurrentMultisampleState.alphaToOneEnable = VK_FALSE;

    // TODO(jmadill): Depth/stencil state.

    // Initialize a dummy MRT blend state.
    // TODO(jmadill): Blend state/MRT.
    mCurrentBlendAttachmentState.blendEnable = VK_FALSE;
    mCurrentBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD;
    mCurrentBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    mCurrentBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD;
    mCurrentBlendAttachmentState.colorWriteMask =
        (VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT |
         VK_COLOR_COMPONENT_A_BIT);

    mCurrentBlendState.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    mCurrentBlendState.pNext = nullptr;
    mCurrentBlendState.flags = 0;
    mCurrentBlendState.logicOpEnable = VK_FALSE;
    mCurrentBlendState.logicOp = VK_LOGIC_OP_CLEAR;
    mCurrentBlendState.attachmentCount = 1;
    mCurrentBlendState.pAttachments = &mCurrentBlendAttachmentState;
    mCurrentBlendState.blendConstants[0] = 0.0f;
    mCurrentBlendState.blendConstants[1] = 0.0f;
    mCurrentBlendState.blendConstants[2] = 0.0f;
    mCurrentBlendState.blendConstants[3] = 0.0f;

    // TODO(jmadill): Dynamic state.

    // The layout and renderpass are filled out at draw time.
    mCurrentPipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    mCurrentPipelineInfo.pNext = nullptr;
    mCurrentPipelineInfo.flags = 0;
    mCurrentPipelineInfo.stageCount = 2;
    mCurrentPipelineInfo.pStages = mCurrentShaderStages;
    mCurrentPipelineInfo.pVertexInputState = &mCurrentVertexInputState;
    mCurrentPipelineInfo.pInputAssemblyState = &mCurrentInputAssemblyState;
    mCurrentPipelineInfo.pTessellationState = nullptr;
    mCurrentPipelineInfo.pViewportState = &mCurrentViewportState;
    mCurrentPipelineInfo.pRasterizationState = &mCurrentRasterState;
    mCurrentPipelineInfo.pMultisampleState = &mCurrentMultisampleState;
    mCurrentPipelineInfo.pDepthStencilState = nullptr;
    mCurrentPipelineInfo.pColorBlendState = &mCurrentBlendState;
    mCurrentPipelineInfo.pDynamicState = nullptr;
    mCurrentPipelineInfo.layout = VK_NULL_HANDLE;
    mCurrentPipelineInfo.renderPass = VK_NULL_HANDLE;
    mCurrentPipelineInfo.subpass = 0;
    mCurrentPipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
    mCurrentPipelineInfo.basePipelineIndex = 0;
}

ContextVk::~ContextVk()
{
    invalidateCurrentPipeline();
}

void ContextVk::onDestroy(const gl::Context *context)
{
    VkDevice device = mRenderer->getDevice();

    mDescriptorPool.destroy(device);
}

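// initialize() sets up the context's descriptor pool with space for uniform buffer and combined
// image sampler descriptors; the pool and set counts are placeholders (see the TODO below).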
gl::Error ContextVk::initialize()
{
    VkDevice device = mRenderer->getDevice();

    VkDescriptorPoolSize poolSizes[2];
    poolSizes[UniformBufferPool].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[UniformBufferPool].descriptorCount = 1024;
    poolSizes[TexturePool].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    poolSizes[TexturePool].descriptorCount = 1024;

    VkDescriptorPoolCreateInfo descriptorPoolInfo;
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.pNext = nullptr;
    descriptorPoolInfo.flags = 0;

    // TODO(jmadill): Pick non-arbitrary max.
    descriptorPoolInfo.maxSets = 2048;

    // Reserve pools for uniform blocks and textures.
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes = poolSizes;

    ANGLE_TRY(mDescriptorPool.init(device, descriptorPoolInfo));

    return gl::NoError();
}

gl::Error ContextVk::flush()
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::finish()
{
    // TODO(jmadill): Implement finish.
    // UNIMPLEMENTED();
    return gl::NoError();
}

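// initPipeline() builds a fresh graphics pipeline from the cached create-info structures. It
// fills in the pieces that depend on current GL state: vertex input descriptions from the vertex
// array, the primitive topology from the current draw mode, and the render pass and pipeline
// layout from the draw framebuffer and bound program.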
gl::Error ContextVk::initPipeline(const gl::Context *context)
{
    ASSERT(!mCurrentPipeline.valid());

    VkDevice device = mRenderer->getDevice();
    const auto &state = mState.getState();
    const auto &programGL = state.getProgram();
    const auto &vao = state.getVertexArray();
    const auto &programVk = GetImplAs<ProgramVk>(programGL);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = GetImplAs<FramebufferVk>(drawFBO);
    VertexArrayVk *vkVAO = GetImplAs<VertexArrayVk>(vao);

    // Ensure the attribs and bindings are updated.
    vkVAO->updateVertexDescriptions(context);

    const auto &vertexBindings = vkVAO->getVertexBindingDescs();
    const auto &vertexAttribs = vkVAO->getVertexAttribDescs();

    // TODO(jmadill): Validate with ASSERT against physical device limits/caps?
    mCurrentVertexInputState.vertexBindingDescriptionCount =
        static_cast<uint32_t>(vertexBindings.size());
    mCurrentVertexInputState.pVertexBindingDescriptions = vertexBindings.data();
    mCurrentVertexInputState.vertexAttributeDescriptionCount =
        static_cast<uint32_t>(vertexAttribs.size());
    mCurrentVertexInputState.pVertexAttributeDescriptions = vertexAttribs.data();

    mCurrentInputAssemblyState.topology = gl_vk::GetPrimitiveTopology(mCurrentDrawMode);

    vk::RenderPass *renderPass = nullptr;
    ANGLE_TRY_RESULT(vkFBO->getRenderPass(context, device), renderPass);
    ASSERT(renderPass && renderPass->valid());

    const vk::PipelineLayout &pipelineLayout = programVk->getPipelineLayout();
    ASSERT(pipelineLayout.valid());

    mCurrentPipelineInfo.layout = pipelineLayout.getHandle();
    mCurrentPipelineInfo.renderPass = renderPass->getHandle();

    vk::Pipeline newPipeline;
    ANGLE_TRY(newPipeline.initGraphics(device, mCurrentPipelineInfo));

    // TODO(jmadill): Don't dispose the current pipeline immediately, it could be in use.
    mCurrentPipeline.retain(device, std::move(newPipeline));

    return gl::NoError();
}

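// Common setup performed before every draw call: recreate the pipeline if the draw mode changed,
// make sure the draw framebuffer is in a started render pass, and bind the pipeline, vertex
// buffers, uniforms and descriptor sets on the current command buffer.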
gl::Error ContextVk::setupDraw(const gl::Context *context, GLenum mode)
{
    if (mode != mCurrentDrawMode)
    {
        invalidateCurrentPipeline();
        mCurrentDrawMode = mode;
    }

    if (!mCurrentPipeline.valid())
    {
        ANGLE_TRY(initPipeline(context));
        ASSERT(mCurrentPipeline.valid());
    }

    VkDevice device = mRenderer->getDevice();
    const auto &state = mState.getState();
    const auto &programGL = state.getProgram();
    ProgramVk *programVk = GetImplAs<ProgramVk>(programGL);
    const auto &vao = state.getVertexArray();
    VertexArrayVk *vkVAO = GetImplAs<VertexArrayVk>(vao);
    const auto *drawFBO = state.getDrawFramebuffer();
    FramebufferVk *vkFBO = GetImplAs<FramebufferVk>(drawFBO);
    Serial queueSerial = mRenderer->getCurrentQueueSerial();
    uint32_t maxAttrib = programGL->getState().getMaxActiveAttribLocation();

    // Process vertex attributes. Assume zero offsets for now.
    // TODO(jmadill): Offset handling.
    const std::vector<VkBuffer> &vertexHandles = vkVAO->getCurrentVertexBufferHandlesCache();
    angle::MemoryBuffer *zeroBuf = nullptr;
    ANGLE_TRY(context->getZeroFilledBuffer(maxAttrib * sizeof(VkDeviceSize), &zeroBuf));

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(mRenderer->getStartedCommandBuffer(&commandBuffer));
    ANGLE_TRY(vkFBO->ensureInRenderPass(context, device, commandBuffer, queueSerial, state));

    commandBuffer->bindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, mCurrentPipeline);
    commandBuffer->bindVertexBuffers(0, maxAttrib, vertexHandles.data(),
                                     reinterpret_cast<const VkDeviceSize *>(zeroBuf->data()));

    // TODO(jmadill): the queue serial should be bound to the pipeline.
    setQueueSerial(queueSerial);
    vkVAO->updateCurrentBufferSerials(programGL->getActiveAttribLocationsMask(), queueSerial);

    // TODO(jmadill): Can probably use more dirty bits here.
    ContextVk *contextVk = GetImplAs<ContextVk>(context);
    ANGLE_TRY(programVk->updateUniforms(contextVk));

    // Bind the graphics descriptor sets.
    // TODO(jmadill): Handle multiple command buffers.
    VkDescriptorSet uniformDescriptorSet = programVk->getDescriptorSet();
    if (uniformDescriptorSet != VK_NULL_HANDLE)
    {
        const vk::PipelineLayout &pipelineLayout = programVk->getPipelineLayout();
        commandBuffer->bindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1,
                                          &uniformDescriptorSet, 0, nullptr);
    }

    return gl::NoError();
}

gl::Error ContextVk::drawArrays(const gl::Context *context, GLenum mode, GLint first, GLsizei count)
{
    ANGLE_TRY(setupDraw(context, mode));

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(mRenderer->getStartedCommandBuffer(&commandBuffer));

    commandBuffer->draw(count, 1, first, 0);
    return gl::NoError();
}

gl::Error ContextVk::drawArraysInstanced(const gl::Context *context,
                                         GLenum mode,
                                         GLint first,
                                         GLsizei count,
                                         GLsizei instanceCount)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

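// drawElements() currently requires a bound element array buffer used at offset zero with 16- or
// 32-bit indices; client-side indices, non-zero offsets and 8-bit indices fall back to
// UNIMPLEMENTED below.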
gl::Error ContextVk::drawElements(const gl::Context *context,
                                  GLenum mode,
                                  GLsizei count,
                                  GLenum type,
                                  const void *indices)
{
    ANGLE_TRY(setupDraw(context, mode));

    if (indices)
    {
        // TODO(jmadill): Buffer offsets and immediate data.
        UNIMPLEMENTED();
        return gl::InternalError() << "Only zero-offset index buffers are currently implemented.";
    }

    if (type == GL_UNSIGNED_BYTE)
    {
        // TODO(jmadill): Index translation.
        UNIMPLEMENTED();
        return gl::InternalError() << "Unsigned byte translation is not yet implemented.";
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(mRenderer->getStartedCommandBuffer(&commandBuffer));

    const gl::Buffer *elementArrayBuffer =
        mState.getState().getVertexArray()->getElementArrayBuffer().get();
    ASSERT(elementArrayBuffer);

    BufferVk *elementArrayBufferVk = GetImplAs<BufferVk>(elementArrayBuffer);

    commandBuffer->bindIndexBuffer(elementArrayBufferVk->getVkBuffer(), 0, GetVkIndexType(type));
    commandBuffer->drawIndexed(count, 1, 0, 0, 0);

    return gl::NoError();
}

gl::Error ContextVk::drawElementsInstanced(const gl::Context *context,
                                           GLenum mode,
                                           GLsizei count,
                                           GLenum type,
                                           const void *indices,
                                           GLsizei instances)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error ContextVk::drawRangeElements(const gl::Context *context,
                                       GLenum mode,
                                       GLuint start,
                                       GLuint end,
                                       GLsizei count,
                                       GLenum type,
                                       const void *indices)
{
    return gl::NoError();
}

VkDevice ContextVk::getDevice() const
{
    return mRenderer->getDevice();
}

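// Command buffer management is delegated to the renderer. submitCommands() also stamps this
// context with the renderer's current queue serial before submitting.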
vk::Error ContextVk::getStartedCommandBuffer(vk::CommandBuffer **commandBufferOut)
{
    return mRenderer->getStartedCommandBuffer(commandBufferOut);
}

vk::Error ContextVk::submitCommands(vk::CommandBuffer *commandBuffer)
{
    setQueueSerial(mRenderer->getCurrentQueueSerial());
    ANGLE_TRY(mRenderer->submitCommandBuffer(commandBuffer));
    return vk::NoError();
}

gl::Error ContextVk::drawArraysIndirect(const gl::Context *context,
                                        GLenum mode,
                                        const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawArraysIndirect hasn't been implemented for the Vulkan backend.";
}

gl::Error ContextVk::drawElementsIndirect(const gl::Context *context,
                                          GLenum mode,
                                          GLenum type,
                                          const void *indirect)
{
    UNIMPLEMENTED();
    return gl::InternalError()
           << "DrawElementsIndirect hasn't been implemented for the Vulkan backend.";
}

GLenum ContextVk::getResetStatus()
{
    UNIMPLEMENTED();
    return GL_NO_ERROR;
}

std::string ContextVk::getVendorString() const
{
    UNIMPLEMENTED();
    return std::string();
}

std::string ContextVk::getRendererDescription() const
{
    return mRenderer->getRendererDescription();
}

void ContextVk::insertEventMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::pushGroupMarker(GLsizei length, const char *marker)
{
    UNIMPLEMENTED();
}

void ContextVk::popGroupMarker()
{
    UNIMPLEMENTED();
}

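// syncState() processes GL state-change dirty bits. Any state change currently invalidates the
// cached pipeline; most dirty bits only emit a warning until they are properly implemented.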
void ContextVk::syncState(const gl::Context *context, const gl::State::DirtyBits &dirtyBits)
{
    if (dirtyBits.any())
    {
        invalidateCurrentPipeline();
    }

    const auto &glState = context->getGLState();

    // TODO(jmadill): Full dirty bits implementation.

    for (auto dirtyBit : dirtyBits)
    {
        switch (dirtyBit)
        {
            case gl::State::DIRTY_BIT_SCISSOR_TEST_ENABLED:
                WARN() << "DIRTY_BIT_SCISSOR_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SCISSOR:
                WARN() << "DIRTY_BIT_SCISSOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_VIEWPORT:
            {
                const gl::Rectangle &viewportGL = glState.getViewport();
                mCurrentViewportVk.x = static_cast<float>(viewportGL.x);
                mCurrentViewportVk.y = static_cast<float>(viewportGL.y);
                mCurrentViewportVk.width = static_cast<float>(viewportGL.width);
                mCurrentViewportVk.height = static_cast<float>(viewportGL.height);
                mCurrentViewportVk.minDepth = glState.getNearPlane();
                mCurrentViewportVk.maxDepth = glState.getFarPlane();

                // TODO(jmadill): Scissor.
                mCurrentScissorVk.offset.x = viewportGL.x;
                mCurrentScissorVk.offset.y = viewportGL.y;
                mCurrentScissorVk.extent.width = viewportGL.width;
                mCurrentScissorVk.extent.height = viewportGL.height;
                break;
            }
            case gl::State::DIRTY_BIT_DEPTH_RANGE:
                WARN() << "DIRTY_BIT_DEPTH_RANGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_ENABLED:
                WARN() << "DIRTY_BIT_BLEND_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_COLOR:
                WARN() << "DIRTY_BIT_BLEND_COLOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_FUNCS:
                WARN() << "DIRTY_BIT_BLEND_FUNCS unimplemented";
                break;
            case gl::State::DIRTY_BIT_BLEND_EQUATIONS:
                WARN() << "DIRTY_BIT_BLEND_EQUATIONS unimplemented";
                break;
            case gl::State::DIRTY_BIT_COLOR_MASK:
                WARN() << "DIRTY_BIT_COLOR_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_COVERAGE:
                WARN() << "DIRTY_BIT_SAMPLE_COVERAGE unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_ENABLED:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_MASK_WORD_0:
            case gl::State::DIRTY_BIT_SAMPLE_MASK_WORD_0 + 1:
                WARN() << "DIRTY_BIT_SAMPLE_MASK_WORD unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_TEST_ENABLED:
                WARN() << "DIRTY_BIT_DEPTH_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_FUNC:
                WARN() << "DIRTY_BIT_DEPTH_FUNC unimplemented";
                break;
            case gl::State::DIRTY_BIT_DEPTH_MASK:
                WARN() << "DIRTY_BIT_DEPTH_MASK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_TEST_ENABLED:
                WARN() << "DIRTY_BIT_STENCIL_TEST_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_FUNCS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_FUNCS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_OPS_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_OPS_BACK:
                WARN() << "DIRTY_BIT_STENCIL_OPS_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_FRONT:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_FRONT unimplemented";
                break;
            case gl::State::DIRTY_BIT_STENCIL_WRITEMASK_BACK:
                WARN() << "DIRTY_BIT_STENCIL_WRITEMASK_BACK unimplemented";
                break;
            case gl::State::DIRTY_BIT_CULL_FACE_ENABLED:
            case gl::State::DIRTY_BIT_CULL_FACE:
                mCurrentRasterState.cullMode = gl_vk::GetCullMode(glState.getRasterizerState());
                break;
            case gl::State::DIRTY_BIT_FRONT_FACE:
                mCurrentRasterState.frontFace =
                    gl_vk::GetFrontFace(glState.getRasterizerState().frontFace);
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET_FILL_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_POLYGON_OFFSET:
                WARN() << "DIRTY_BIT_POLYGON_OFFSET unimplemented";
                break;
            case gl::State::DIRTY_BIT_RASTERIZER_DISCARD_ENABLED:
                WARN() << "DIRTY_BIT_RASTERIZER_DISCARD_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_LINE_WIDTH:
                mCurrentRasterState.lineWidth = glState.getLineWidth();
                break;
            case gl::State::DIRTY_BIT_PRIMITIVE_RESTART_ENABLED:
                WARN() << "DIRTY_BIT_PRIMITIVE_RESTART_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_COLOR:
                WARN() << "DIRTY_BIT_CLEAR_COLOR unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_DEPTH:
                WARN() << "DIRTY_BIT_CLEAR_DEPTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_CLEAR_STENCIL:
                WARN() << "DIRTY_BIT_CLEAR_STENCIL unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_ALIGNMENT:
                WARN() << "DIRTY_BIT_UNPACK_ALIGNMENT unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_ROW_LENGTH:
                WARN() << "DIRTY_BIT_UNPACK_ROW_LENGTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_IMAGE_HEIGHT:
                WARN() << "DIRTY_BIT_UNPACK_IMAGE_HEIGHT unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_SKIP_IMAGES:
                WARN() << "DIRTY_BIT_UNPACK_SKIP_IMAGES unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_SKIP_ROWS:
                WARN() << "DIRTY_BIT_UNPACK_SKIP_ROWS unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_SKIP_PIXELS:
                WARN() << "DIRTY_BIT_UNPACK_SKIP_PIXELS unimplemented";
                break;
            case gl::State::DIRTY_BIT_UNPACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_UNPACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_ALIGNMENT:
                WARN() << "DIRTY_BIT_PACK_ALIGNMENT unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_REVERSE_ROW_ORDER:
                WARN() << "DIRTY_BIT_PACK_REVERSE_ROW_ORDER unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_ROW_LENGTH:
                WARN() << "DIRTY_BIT_PACK_ROW_LENGTH unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_SKIP_ROWS:
                WARN() << "DIRTY_BIT_PACK_SKIP_ROWS unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_SKIP_PIXELS:
                WARN() << "DIRTY_BIT_PACK_SKIP_PIXELS unimplemented";
                break;
            case gl::State::DIRTY_BIT_PACK_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_PACK_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DITHER_ENABLED:
                WARN() << "DIRTY_BIT_DITHER_ENABLED unimplemented";
                break;
            case gl::State::DIRTY_BIT_GENERATE_MIPMAP_HINT:
                WARN() << "DIRTY_BIT_GENERATE_MIPMAP_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_SHADER_DERIVATIVE_HINT:
                WARN() << "DIRTY_BIT_SHADER_DERIVATIVE_HINT unimplemented";
                break;
            case gl::State::DIRTY_BIT_READ_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_READ_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_FRAMEBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_RENDERBUFFER_BINDING:
                WARN() << "DIRTY_BIT_RENDERBUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_VERTEX_ARRAY_BINDING:
                WARN() << "DIRTY_BIT_VERTEX_ARRAY_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING:
                WARN() << "DIRTY_BIT_DRAW_INDIRECT_BUFFER_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_BINDING:
                WARN() << "DIRTY_BIT_PROGRAM_BINDING unimplemented";
                break;
            case gl::State::DIRTY_BIT_PROGRAM_EXECUTABLE:
            {
                // { vertex, fragment }
                ProgramVk *programVk = GetImplAs<ProgramVk>(glState.getProgram());
                mCurrentShaderStages[0].module = programVk->getLinkedVertexModule().getHandle();
                mCurrentShaderStages[1].module = programVk->getLinkedFragmentModule().getHandle();

                // Also invalidate the vertex descriptions cache in the Vertex Array.
                VertexArrayVk *vaoVk = GetImplAs<VertexArrayVk>(glState.getVertexArray());
                vaoVk->invalidateVertexDescriptions();
                break;
            }
            case gl::State::DIRTY_BIT_TEXTURE_BINDINGS:
                WARN() << "DIRTY_BIT_TEXTURE_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLER_BINDINGS:
                WARN() << "DIRTY_BIT_SAMPLER_BINDINGS unimplemented";
                break;
            case gl::State::DIRTY_BIT_MULTISAMPLING:
                WARN() << "DIRTY_BIT_MULTISAMPLING unimplemented";
                break;
            case gl::State::DIRTY_BIT_SAMPLE_ALPHA_TO_ONE:
                WARN() << "DIRTY_BIT_SAMPLE_ALPHA_TO_ONE unimplemented";
                break;
            case gl::State::DIRTY_BIT_COVERAGE_MODULATION:
                WARN() << "DIRTY_BIT_COVERAGE_MODULATION unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_MV:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_MV unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ:
                WARN() << "DIRTY_BIT_PATH_RENDERING_MATRIX_PROJ unimplemented";
                break;
            case gl::State::DIRTY_BIT_PATH_RENDERING_STENCIL_STATE:
                WARN() << "DIRTY_BIT_PATH_RENDERING_STENCIL_STATE unimplemented";
                break;
            case gl::State::DIRTY_BIT_FRAMEBUFFER_SRGB:
                WARN() << "DIRTY_BIT_FRAMEBUFFER_SRGB unimplemented";
                break;
            default:
                if (dirtyBit >= gl::State::DIRTY_BIT_CURRENT_VALUE_0 &&
                    dirtyBit < gl::State::DIRTY_BIT_CURRENT_VALUE_MAX)
                {
                    WARN() << "DIRTY_BIT_CURRENT_VALUE unimplemented";
                }
                else
                {
                    UNREACHABLE();
                }
                break;
        }
    }
}

GLint ContextVk::getGPUDisjoint()
{
    UNIMPLEMENTED();
    return GLint();
}

GLint64 ContextVk::getTimestamp()
{
    UNIMPLEMENTED();
    return GLint64();
}

void ContextVk::onMakeCurrent(const gl::Context * /*context*/)
{
}

const gl::Caps &ContextVk::getNativeCaps() const
{
    return mRenderer->getNativeCaps();
}

const gl::TextureCapsMap &ContextVk::getNativeTextureCaps() const
{
    return mRenderer->getNativeTextureCaps();
}

const gl::Extensions &ContextVk::getNativeExtensions() const
{
    return mRenderer->getNativeExtensions();
}

const gl::Limitations &ContextVk::getNativeLimitations() const
{
    return mRenderer->getNativeLimitations();
}

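// Factory methods: each returns the Vulkan backend implementation of the corresponding front-end
// GL object.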
CompilerImpl *ContextVk::createCompiler()
{
    return new CompilerVk();
}

ShaderImpl *ContextVk::createShader(const gl::ShaderState &state)
{
    return new ShaderVk(state);
}

ProgramImpl *ContextVk::createProgram(const gl::ProgramState &state)
{
    return new ProgramVk(state);
}

FramebufferImpl *ContextVk::createFramebuffer(const gl::FramebufferState &state)
{
    return FramebufferVk::CreateUserFBO(state);
}

TextureImpl *ContextVk::createTexture(const gl::TextureState &state)
{
    return new TextureVk(state);
}

RenderbufferImpl *ContextVk::createRenderbuffer()
{
    return new RenderbufferVk();
}

BufferImpl *ContextVk::createBuffer(const gl::BufferState &state)
{
    return new BufferVk(state);
}

VertexArrayImpl *ContextVk::createVertexArray(const gl::VertexArrayState &state)
{
    return new VertexArrayVk(state);
}

QueryImpl *ContextVk::createQuery(GLenum type)
{
    return new QueryVk(type);
}

FenceNVImpl *ContextVk::createFenceNV()
{
    return new FenceNVVk();
}

SyncImpl *ContextVk::createSync()
{
    return new SyncVk();
}

TransformFeedbackImpl *ContextVk::createTransformFeedback(const gl::TransformFeedbackState &state)
{
    return new TransformFeedbackVk(state);
}

SamplerImpl *ContextVk::createSampler(const gl::SamplerState &state)
{
    return new SamplerVk(state);
}

ProgramPipelineImpl *ContextVk::createProgramPipeline(const gl::ProgramPipelineState &state)
{
    return new ProgramPipelineVk(state);
}

std::vector<PathImpl *> ContextVk::createPaths(GLsizei)
{
    return std::vector<PathImpl *>();
}

// TODO(jmadill): Use pipeline cache.
void ContextVk::invalidateCurrentPipeline()
{
    mRenderer->enqueueGarbageOrDeleteNow(*this, mCurrentPipeline);
}

gl::Error ContextVk::dispatchCompute(const gl::Context *context,
                                     GLuint numGroupsX,
                                     GLuint numGroupsY,
                                     GLuint numGroupsZ)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

vk::DescriptorPool *ContextVk::getDescriptorPool()
{
    return &mDescriptorPool;
}

}  // namespace rx