blob: ed5324f8a2f95e87a0078216e87b340931e94038 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040013#include "libANGLE/Context.h"
14#include "libANGLE/renderer/vulkan/ContextVk.h"
15#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050016#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017
18namespace rx
19{
namespace
{
// Composes the application-provided swizzle with the channel layout of the
// Vulkan format used to emulate luminance/alpha internal formats. For all
// other formats the application swizzle is passed through unchanged.
// NOTE(review): the luminance/alpha cases forward the user swizzle values
// directly rather than remapping user GL_GREEN/GL_BLUE/GL_ALPHA selections
// onto the emulation format's channels — confirm non-identity user swizzles
// behave correctly on these formats.
void MapSwizzleState(GLenum internalFormat,
                     const gl::SwizzleState &swizzleState,
                     gl::SwizzleState *swizzleStateOut)
{
    switch (internalFormat)
    {
        case GL_LUMINANCE8_OES:
            // Luminance lives in the red channel; alpha reads as opaque.
            swizzleStateOut->swizzleRed   = swizzleState.swizzleRed;
            swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
            swizzleStateOut->swizzleBlue  = swizzleState.swizzleRed;
            swizzleStateOut->swizzleAlpha = GL_ONE;
            break;
        case GL_LUMINANCE8_ALPHA8_OES:
            // Luminance in red, alpha in green.
            swizzleStateOut->swizzleRed   = swizzleState.swizzleRed;
            swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
            swizzleStateOut->swizzleBlue  = swizzleState.swizzleRed;
            swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
            break;
        case GL_ALPHA8_OES:
            // Alpha-only: color channels read as zero; alpha comes from red.
            swizzleStateOut->swizzleRed   = GL_ZERO;
            swizzleStateOut->swizzleGreen = GL_ZERO;
            swizzleStateOut->swizzleBlue  = GL_ZERO;
            swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
            break;
        default:
            *swizzleStateOut = swizzleState;
            break;
    }
}

// Staging buffers are used both as transfer sources (uploads to images) and
// destinations.
constexpr VkBufferUsageFlags kStagingBufferFlags =
    (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
// Initial size of each staging buffer allocation, in bytes.
constexpr size_t kStagingBufferSize = 1024 * 16;
}  // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040056
// PixelBuffer implementation.
PixelBuffer::PixelBuffer() : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4);
}
64
Jamie Madilla7be1f72018-04-13 15:16:26 -040065PixelBuffer::~PixelBuffer()
Jamie Madill26084d02018-04-09 13:44:04 -040066{
67}
68
// Hands the staging buffer back to the renderer for deferred destruction.
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
73
// Copies user pixel data into the staging buffer and records a buffer->image
// copy region to be executed later by flushUpdatesToImage. The input pixels
// are unpacked according to 'unpack' and converted to the texture's storage
// format via the format's load function.
gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                              const gl::ImageIndex &index,
                                              const gl::Extents &extents,
                                              const gl::Offset &offset,
                                              const gl::InternalFormat &formatInfo,
                                              const gl::PixelUnpackState &unpack,
                                              GLenum type,
                                              const uint8_t *pixels)
{
    // Compute the layout of the caller's data from the unpack state.
    GLuint inputRowPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
        inputRowPitch);

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
        inputDepthPitch);

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeSkipBytes(inputRowPitch, inputDepthPitch, unpack, applySkipImages),
        inputSkipBytes);

    RendererVk *renderer = contextVk->getRenderer();

    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    // Staging data is tightly packed at the storage format's pixel size.
    size_t outputRowPitch   = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;
    size_t allocationSize   = outputDepthPitch * extents.depth;
    // NOTE(review): allocate's result is not checked here — confirm it cannot
    // fail or propagate an error if it can.
    mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                            &stagingOffset, &newBufferAllocated);

    const uint8_t *source = pixels + inputSkipBytes;

    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    // Convert/copy the user data into the staging memory.
    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    VkBufferImageCopy copy;

    // bufferRowLength/bufferImageHeight match extents because the staging data
    // above is written tightly packed.
    copy.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    // Defer the actual copy until the image is known to be initialized.
    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}
142
// Executes all staged buffer->image copies into 'image' on the given command
// buffer, then clears the pending-update list. No-op when nothing is staged.
vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
                                           vk::ImageHelper *image,
                                           vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    // Make the staged CPU writes visible before recording the copies.
    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);

        // Conservatively flush all writes to the image. We could use a more restricted barrier.
        // Do not move this above the for loop, otherwise multiple updates can have race conditions
        // and not be applied correctly as seen i:
        // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
        image->changeLayoutWithStages(
            VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    mSubresourceUpdates.clear();

    return vk::NoError();
}
174
// Returns true when no subresource updates are pending a flush.
bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}
179
// Default-constructed updates carry no buffer; copyRegion is left
// uninitialized and must not be used until bufferHandle is valid.
PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
{
}

// Records a pending copy of 'copyRegionIn' sourced from 'bufferHandleIn'.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400191
// TextureVk implementation.
TextureVk::TextureVk(const gl::TextureState &state) : TextureImpl(state)
{
    // Wire the render target to this texture's image/base-level view so the
    // texture can be used as a framebuffer attachment.
    mRenderTarget.image     = &mImage;
    mRenderTarget.imageView = &mBaseLevelImageView;
    mRenderTarget.resource  = this;
}
199
200TextureVk::~TextureVk()
201{
202}
203
// Releases the image, its views, the sampler and the staging buffer back to
// the renderer for deferred destruction.
gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseResource(*this, &mSampler);

    mPixelBuffer.release(renderer);

    return gl::NoError();
}
216
// Defines a texture level. If the existing image's size or format no longer
// matches, the image is released and will be recreated lazily (see
// ensureImageInitialized). Pixel data, when provided, is staged for upload.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const gl::ImageDesc &desc  = mState.getImageDesc(index);
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
        // Redefinition with a different size or format invalidates the image.
        if (desc.size != size || mImage.getFormat() != vkFormat)
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    getNewWritingNode(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
260
// Stages a partial texture update; the copy is applied to the image when the
// pending updates are next flushed (see ensureImageInitialized).
gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk                 = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    getNewWritingNode(contextVk->getRenderer());

    return gl::NoError();
}
280
Jamie Madillc564c072017-06-01 12:45:42 -0400281gl::Error TextureVk::setCompressedImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400282 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400283 GLenum internalFormat,
284 const gl::Extents &size,
285 const gl::PixelUnpackState &unpack,
286 size_t imageSize,
287 const uint8_t *pixels)
288{
289 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500290 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400291}
292
Jamie Madillc564c072017-06-01 12:45:42 -0400293gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400294 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400295 const gl::Box &area,
296 GLenum format,
297 const gl::PixelUnpackState &unpack,
298 size_t imageSize,
299 const uint8_t *pixels)
300{
301 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500302 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400303}
304
Jamie Madillc564c072017-06-01 12:45:42 -0400305gl::Error TextureVk::copyImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400306 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400307 const gl::Rectangle &sourceArea,
308 GLenum internalFormat,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400309 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400310{
311 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500312 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400313}
314
Jamie Madillc564c072017-06-01 12:45:42 -0400315gl::Error TextureVk::copySubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400316 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400317 const gl::Offset &destOffset,
318 const gl::Rectangle &sourceArea,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400319 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400320{
321 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500322 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400323}
324
// Returns a command buffer suitable for recording writes to this texture.
// Reuses the current writing node's outside-render-pass command buffer when
// possible, otherwise begins a new write resource / recording.
vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
                                              vk::CommandBuffer **outCommandBuffer)
{
    const VkDevice device = renderer->getDevice();
    // Keep this resource's serial current so deferred deletes are ordered.
    updateQueueSerial(renderer->getCurrentQueueSerial());
    if (!hasChildlessWritingNode())
    {
        beginWriteResource(renderer, outCommandBuffer);
    }
    else
    {
        vk::CommandGraphNode *node = getCurrentWritingNode();
        *outCommandBuffer          = node->getOutsideRenderPassCommands();
        if (!(*outCommandBuffer)->valid())
        {
            // The node exists but has not started recording yet.
            ANGLE_TRY(node->beginOutsideRenderPassRecording(device, renderer->getCommandPool(),
                                                            outCommandBuffer));
        }
    }
    return vk::NoError();
}
346
Jamie Madillc564c072017-06-01 12:45:42 -0400347gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500348 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400349 size_t levels,
350 GLenum internalFormat,
351 const gl::Extents &size)
352{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400353 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
354 RendererVk *renderer = contextVk->getRenderer();
355 const vk::Format &format = renderer->getFormat(internalFormat);
356 vk::CommandBuffer *commandBuffer = nullptr;
357 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
358 ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
359 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400360}
361
Corentin Wallez99d492c2018-02-27 15:17:10 -0500362gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
363 gl::TextureType type,
364 egl::Image *image)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400365{
366 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500367 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400368}
369
Jamie Madill4928b7c2017-06-20 12:57:39 -0400370gl::Error TextureVk::setImageExternal(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500371 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400372 egl::Stream *stream,
373 const egl::Stream::GLTextureDescription &desc)
374{
375 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500376 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400377}
378
Jamie Madillc564c072017-06-01 12:45:42 -0400379gl::Error TextureVk::generateMipmap(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400380{
381 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500382 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400383}
384
Jamie Madill4928b7c2017-06-20 12:57:39 -0400385gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400386{
387 UNIMPLEMENTED();
Jamie Madill4928b7c2017-06-20 12:57:39 -0400388 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400389}
390
Jamie Madill4928b7c2017-06-20 12:57:39 -0400391gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400392{
393 UNIMPLEMENTED();
Jamie Madill4928b7c2017-06-20 12:57:39 -0400394 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400395}
396
Jamie Madill4928b7c2017-06-20 12:57:39 -0400397gl::Error TextureVk::releaseTexImage(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400398{
399 UNIMPLEMENTED();
Jamie Madill4928b7c2017-06-20 12:57:39 -0400400 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400401}
402
// Returns this texture's render target for framebuffer attachment use,
// first making sure the image exists and all staged updates are flushed.
gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}
422
// Lazily creates the VkImage (sized from the base level description) and
// flushes any staged pixel updates into it. Fast-path no-op when the image
// already exists and nothing is staged.
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    if (mImage.valid() && mPixelBuffer.empty())
    {
        return vk::NoError();
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    if (!mImage.valid())
    {
        const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
        const gl::Extents &extents = baseLevelDesc.size;
        const uint32_t levelCount  = getLevelCount();

        ANGLE_TRY(initImage(renderer, format, extents, levelCount, commandBuffer));
    }

    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer));
    return vk::NoError();
}
447
// Rebuilds the Vulkan sampler from the current GL sampler state. Any existing
// sampler is handed to the renderer for deferred destruction first.
gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    // Nothing to do if state is clean and a sampler already exists.
    if (dirtyBits.none() && mSampler.valid())
    {
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        RendererVk *renderer = contextVk->getRenderer();
        renderer->releaseResource(*this, &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext                   = nullptr;
    samplerInfo.flags                   = 0;
    samplerInfo.magFilter               = gl_vk::GetFilter(samplerState.magFilter);
    samplerInfo.minFilter               = gl_vk::GetFilter(samplerState.minFilter);
    samplerInfo.mipmapMode              = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
    samplerInfo.addressModeU            = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
    samplerInfo.addressModeV            = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
    samplerInfo.addressModeW            = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
    samplerInfo.mipLodBias              = 0.0f;
    samplerInfo.anisotropyEnable        = VK_FALSE;
    samplerInfo.maxAnisotropy           = 1.0f;
    samplerInfo.compareEnable           = VK_FALSE;
    samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod                  = samplerState.minLod;
    samplerInfo.maxLod                  = samplerState.maxLod;
    samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
    return gl::NoError();
}
488
// Multisample storage is not implemented in the Vulkan backend yet.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
499
// Robust resource initialization is not implemented; deliberately returns
// success (unlike the other stubs) so callers can proceed without content
// initialization.
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
506
// Returns the backing image helper; only valid once the image is initialized.
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
512
513const vk::ImageView &TextureVk::getImageView() const
514{
Jamie Madill93edca12018-03-30 10:43:18 -0400515 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400516
517 const GLenum minFilter = mState.getSamplerState().minFilter;
518 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
519 {
520 return mBaseLevelImageView;
521 }
522
523 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400524}
525
// Returns the Vulkan sampler; only valid after syncState has created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
531
// Creates the VkImage with attachment/transfer/sampled usage, binds
// device-local memory, creates the mip-chain and base-level views with the
// mapped swizzle, and clears the image to black.
vk::Error TextureVk::initImage(RendererVk *renderer,
                               const vk::Format &format,
                               const gl::Extents &extents,
                               const uint32_t levelCount,
                               vk::CommandBuffer *commandBuffer)
{
    const VkDevice device = renderer->getDevice();

    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

    // Adjust the swizzle for emulated luminance/alpha formats.
    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    VkClearColorValue black = {{0}};
    mImage.clearColor(black, commandBuffer);
    return vk::NoError();
}
564
// Queues the image and both views for deferred destruction and notifies
// observers (e.g. attached framebuffers) that this texture changed.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseResource(*this, &mBaseLevelImageView);
    renderer->releaseResource(*this, &mMipmapImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
572
Luc Ferron66410532018-04-20 12:47:45 -0400573uint32_t TextureVk::getLevelCount() const
574{
575 ASSERT(mState.getEffectiveBaseLevel() == 0);
576
577 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
578 return mState.getMipmapMaxLevel() + 1;
579}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400580} // namespace rx