blob: 84a63e4b6273c0cfe5144b98e9133669675228f6 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040013#include "libANGLE/Context.h"
14#include "libANGLE/renderer/vulkan/ContextVk.h"
15#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050016#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017
18namespace rx
19{
Luc Ferron5164b792018-03-06 09:10:12 -050020namespace
21{
Jamie Madill93edca12018-03-30 10:43:18 -040022void MapSwizzleState(GLenum internalFormat,
23 const gl::SwizzleState &swizzleState,
24 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050025{
26 switch (internalFormat)
27 {
Jamie Madill26084d02018-04-09 13:44:04 -040028 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040029 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
30 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
31 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050033 break;
Jamie Madill26084d02018-04-09 13:44:04 -040034 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040035 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
36 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
37 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050039 break;
Jamie Madill26084d02018-04-09 13:44:04 -040040 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040041 swizzleStateOut->swizzleRed = GL_ZERO;
42 swizzleStateOut->swizzleGreen = GL_ZERO;
43 swizzleStateOut->swizzleBlue = GL_ZERO;
44 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040045 break;
Luc Ferron5164b792018-03-06 09:10:12 -050046 default:
Jamie Madill93edca12018-03-30 10:43:18 -040047 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050048 break;
49 }
50}
Jamie Madill26084d02018-04-09 13:44:04 -040051
// The staging buffer is used as both the source and the destination of
// transfer commands (uploads to images, and readbacks).
constexpr VkBufferUsageFlags kStagingBufferFlags =
    (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
// Size in bytes of each staging buffer allocation block (16 KiB).
constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron5164b792018-03-06 09:10:12 -050055} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040056
// StagingStorage implementation.
StagingStorage::StagingStorage() : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4);
}
64
// Trivial destructor: GPU resources are returned explicitly via release().
StagingStorage::~StagingStorage()
{
}
68
// Releases the underlying staging buffer back to the renderer.
void StagingStorage::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
73
// Copies user pixel data into the staging buffer — converting it to the
// texture's Vulkan storage format via the format's load function — and records
// a buffer-to-image copy region to be replayed later by flushUpdatesToImage().
//
// NOTE(review): the copy destination is always the image origin
// (gl::Offset() below), so sub-rectangle offsets are not yet honored — confirm
// offset support is intentionally deferred.
gl::Error StagingStorage::stageSubresourceUpdate(ContextVk *contextVk,
                                                 const gl::ImageIndex &index,
                                                 const gl::Extents &extents,
                                                 const gl::InternalFormat &formatInfo,
                                                 const gl::PixelUnpackState &unpack,
                                                 GLenum type,
                                                 const uint8_t *pixels)
{
    // Compute the layout (pitches and skip) of the incoming client data from
    // the unpack state.
    GLuint inputRowPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
        inputRowPitch);

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
        inputDepthPitch);

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeSkipBytes(inputRowPitch, inputDepthPitch, unpack, applySkipImages),
        inputSkipBytes);

    RendererVk *renderer = contextVk->getRenderer();

    // The staged copy is written tightly packed in the Vulkan texture format.
    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    size_t outputRowPitch   = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    // Reserve space in the staging buffer for the converted pixels.
    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;
    size_t allocationSize   = outputDepthPitch * extents.depth;
    mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                            &stagingOffset, &newBufferAllocated);

    const uint8_t *source = pixels + inputSkipBytes;

    // Load (and convert if necessary) the client data into staging memory.
    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    // Describe the buffer-to-image copy for later replay.
    VkBufferImageCopy copy;

    copy.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = index.getLayerCount();

    gl_vk::GetOffset(gl::Offset(), &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}
141
// Replays all staged buffer-to-image copies into the given command buffer and
// clears the pending list. No-op when nothing has been staged.
vk::Error StagingStorage::flushUpdatesToImage(RendererVk *renderer,
                                              vk::ImageHelper *image,
                                              vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    // Conservatively flush all writes to the image. We could use a more restricted barrier.
    image->changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                  VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

    // Make CPU writes to the staging memory visible to the device.
    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    // Record one copy command per staged subresource update.
    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);
        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    mSubresourceUpdates.clear();

    return vk::NoError();
}
169
Jamie Madill20fa8d52018-04-15 10:09:32 -0400170StagingStorage::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
171{
172}
173
// Associates a staged copy region with the staging buffer it reads from.
StagingStorage::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                     const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}
179
180StagingStorage::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
181
// TextureVk implementation.
TextureVk::TextureVk(const gl::TextureState &state) : TextureImpl(state)
{
    // Wire the render target at this texture's image, view, and resource so
    // the texture can serve directly as a framebuffer attachment.
    mRenderTarget.image     = &mImage;
    mRenderTarget.imageView = &mImageView;
    mRenderTarget.resource  = this;
}
189
// Trivial destructor: GPU resources are released in onDestroy().
TextureVk::~TextureVk()
{
}
193
// Hands the texture's GPU objects (image, view, sampler, staging storage) to
// the renderer for release.
gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseResource(*this, &mSampler);

    mStagingStorage.release(renderer);

    return gl::NoError();
}
206
// Defines level 0 of the texture: (re)creates storage state as needed, creates
// the sampler on first use, and stages any provided pixel data. The actual
// Vulkan image is created lazily in ensureImageInitialized().
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();
    VkDevice device      = contextVk->getDevice();

    // TODO(jmadill): support multi-level textures.
    if (index.getLevelIndex() != 0)
    {
        UNIMPLEMENTED();
        return gl::InternalError();
    }

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    // Redefining with a different size or format requires recreating the
    // underlying Vulkan image; release the old one here.
    if (mImage.valid())
    {
        const gl::ImageDesc &desc  = mState.getImageDesc(index);
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
        if (desc.size != size || mImage.getFormat() != vkFormat)
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    if (!mSampler.valid())
    {
        // Create a simple sampler. Force basic parameter settings.
        // TODO(jmadill): Sampler parameters.
        VkSamplerCreateInfo samplerInfo;
        samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
        samplerInfo.pNext                   = nullptr;
        samplerInfo.flags                   = 0;
        samplerInfo.magFilter               = VK_FILTER_NEAREST;
        samplerInfo.minFilter               = VK_FILTER_NEAREST;
        samplerInfo.mipmapMode              = VK_SAMPLER_MIPMAP_MODE_NEAREST;
        samplerInfo.addressModeU            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        samplerInfo.addressModeV            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        samplerInfo.addressModeW            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        samplerInfo.mipLodBias              = 0.0f;
        samplerInfo.anisotropyEnable        = VK_FALSE;
        samplerInfo.maxAnisotropy           = 1.0f;
        samplerInfo.compareEnable           = VK_FALSE;
        samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
        samplerInfo.minLod                  = 0.0f;
        samplerInfo.maxLod                  = 1.0f;
        samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
        samplerInfo.unnormalizedCoordinates = VK_FALSE;

        ANGLE_TRY(mSampler.init(device, samplerInfo));
    }

    // Create a new graph node to store image initialization commands.
    getNewWritingNode(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mStagingStorage.stageSubresourceUpdate(contextVk, index, size, formatInfo, unpack,
                                                         type, pixels));
    }

    return gl::NoError();
}
285
Jamie Madillc564c072017-06-01 12:45:42 -0400286gl::Error TextureVk::setSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400287 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400288 const gl::Box &area,
289 GLenum format,
290 GLenum type,
291 const gl::PixelUnpackState &unpack,
292 const uint8_t *pixels)
293{
Jamie Madill5b18f482017-11-30 17:24:22 -0500294 ContextVk *contextVk = vk::GetImpl(context);
295 const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
Jamie Madill26084d02018-04-09 13:44:04 -0400296 ANGLE_TRY(mStagingStorage.stageSubresourceUpdate(
Jamie Madill20fa8d52018-04-15 10:09:32 -0400297 contextVk, index, gl::Extents(area.width, area.height, area.depth), formatInfo, unpack,
298 type, pixels));
Jamie Madill5b18f482017-11-30 17:24:22 -0500299 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400300}
301
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
313
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
325
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
335
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
345
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setStorage(const gl::Context *context,
                                gl::TextureType type,
                                size_t levels,
                                GLenum internalFormat,
                                const gl::Extents &size)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
355
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
363
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
372
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::generateMipmap(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
378
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
384
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
390
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
396
// Returns the render target for framebuffer attachment. Ensures the backing
// Vulkan image exists (with staged updates flushed) before handing it out.
gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2318
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}
416
// Lazily creates the Vulkan image (storage, memory, view, plus an initial
// clear) and flushes any staged subresource updates into it.
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    VkDevice device                  = renderer->getDevice();
    vk::CommandBuffer *commandBuffer = nullptr;

    updateQueueSerial(renderer->getCurrentQueueSerial());
    // Reuse the current writing node's outside-render-pass command buffer when
    // one exists; otherwise begin a fresh write operation on the command graph.
    if (!hasChildlessWritingNode())
    {
        beginWriteResource(renderer, &commandBuffer);
    }
    else
    {
        vk::CommandGraphNode *node = getCurrentWritingNode();
        commandBuffer              = node->getOutsideRenderPassCommands();
        if (!commandBuffer->valid())
        {
            ANGLE_TRY(node->beginOutsideRenderPassRecording(device, renderer->getCommandPool(),
                                                            &commandBuffer));
        }
    }

    if (!mImage.valid())
    {
        // Size and format are taken from the base level of the front-end state.
        const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
        const gl::Extents &extents         = baseLevelDesc.size;
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);

        // Usable as attachment, transfer src/dst, and for sampling.
        VkImageUsageFlags usage =
            (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
             VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

        ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage));

        VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

        // Compute the effective component swizzle (see MapSwizzleState).
        gl::SwizzleState mappedSwizzle;
        MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

        // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
        ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                       mappedSwizzle, &mImageView));

        // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361

        // Clear the freshly created image so uninitialized memory is never
        // sampled or displayed.
        VkClearColorValue black = {{0}};
        mImage.clearColor(black, commandBuffer);
    }

    ANGLE_TRY(mStagingStorage.flushUpdatesToImage(renderer, &mImage, commandBuffer));
    return vk::NoError();
}
470
// Reacts to front-end texture state changes. Currently a no-op.
void TextureVk::syncState(const gl::Texture::DirtyBits &dirtyBits)
{
    // TODO(jmadill): Texture sync state.
}
475
// Not yet implemented for the Vulkan backend.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
486
// Not yet implemented. Note: deliberately returns NoError() (unlike the other
// stubs) so robust-resource-initialization paths are not treated as failures.
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
493
// Returns the backing image helper. Only valid once the image exists (see
// ensureImageInitialized).
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
499
// Returns the image view. Only valid once the backing image exists.
const vk::ImageView &TextureVk::getImageView() const
{
    ASSERT(mImage.valid());
    return mImageView;
}
505
// Returns the sampler. Only valid after setImage has created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
511
// Queues the image and its view for release against the current queue serial
// and notifies observers that attachment state may have changed.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseResource(*this, &mImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
518
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400519} // namespace rx