blob: d505af5f2bae6a166398437b085603272eb35034 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040013#include "libANGLE/Context.h"
14#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040015#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040016#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050017#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040018
19namespace rx
20{
Luc Ferron5164b792018-03-06 09:10:12 -050021namespace
22{
Jamie Madill93edca12018-03-30 10:43:18 -040023void MapSwizzleState(GLenum internalFormat,
24 const gl::SwizzleState &swizzleState,
25 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050026{
27 switch (internalFormat)
28 {
Jamie Madill26084d02018-04-09 13:44:04 -040029 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040030 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
31 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050034 break;
Jamie Madill26084d02018-04-09 13:44:04 -040035 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040036 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
37 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050040 break;
Jamie Madill26084d02018-04-09 13:44:04 -040041 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040042 swizzleStateOut->swizzleRed = GL_ZERO;
43 swizzleStateOut->swizzleGreen = GL_ZERO;
44 swizzleStateOut->swizzleBlue = GL_ZERO;
45 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040046 break;
Luc Ferron7348fc52018-05-09 07:17:16 -040047 case GL_RGB8:
48 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
49 swizzleStateOut->swizzleGreen = swizzleState.swizzleGreen;
50 swizzleStateOut->swizzleBlue = swizzleState.swizzleBlue;
51 swizzleStateOut->swizzleAlpha = GL_ONE;
52 break;
Luc Ferron5164b792018-03-06 09:10:12 -050053 default:
Jamie Madill93edca12018-03-30 10:43:18 -040054 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050055 break;
56 }
57}
Jamie Madill26084d02018-04-09 13:44:04 -040058
59constexpr VkBufferUsageFlags kStagingBufferFlags =
60 (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
61constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron5164b792018-03-06 09:10:12 -050062} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040063
Jamie Madill26084d02018-04-09 13:44:04 -040064// StagingStorage implementation.
// Owns the CPU-visible staging buffer used to accumulate pixel uploads before
// they are copied into the backing VkImage.
PixelBuffer::PixelBuffer(RendererVk *renderer)
    : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4, renderer);
}
72
Jamie Madilla7be1f72018-04-13 15:16:26 -040073PixelBuffer::~PixelBuffer()
Jamie Madill26084d02018-04-09 13:44:04 -040074{
75}
76
// Hands the staging buffer's resources back to the renderer for cleanup.
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
81
// Converts user-supplied pixels into the image's storage format inside the
// staging buffer and records a buffer->image copy region. The copy is applied
// later by flushUpdatesToImage.
gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                              const gl::ImageIndex &index,
                                              const gl::Extents &extents,
                                              const gl::Offset &offset,
                                              const gl::InternalFormat &formatInfo,
                                              const gl::PixelUnpackState &unpack,
                                              GLenum type,
                                              const uint8_t *pixels)
{
    // Compute the layout of the incoming user data from the unpack state.
    GLuint inputRowPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
        inputRowPitch);

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
        inputDepthPitch);

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeSkipBytes(type, inputRowPitch, inputDepthPitch, unpack, applySkipImages),
        inputSkipBytes);

    RendererVk *renderer = contextVk->getRenderer();

    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    // Staged data is tightly packed in the Vulkan storage format.
    size_t outputRowPitch   = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;
    size_t allocationSize   = outputDepthPitch * extents.depth;
    mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                            &stagingOffset, &newBufferAllocated);

    const uint8_t *source = pixels + inputSkipBytes;

    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    // Convert (or copy) the user data into the staging memory.
    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    VkBufferImageCopy copy;

    copy.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    // Queue the region; it is executed against the image at flush time.
    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}
150
Jamie Madill58675012018-05-22 14:54:07 -0400151gl::Error PixelBuffer::stageSubresourceUpdateFromFramebuffer(const gl::Context *context,
152 const gl::ImageIndex &index,
153 const gl::Rectangle &sourceArea,
154 const gl::Offset &dstOffset,
155 const gl::Extents &dstExtent,
156 const gl::InternalFormat &formatInfo,
157 FramebufferVk *framebufferVk)
Luc Ferron2a849bf2018-05-10 13:19:11 -0400158{
159 // If the extents and offset is outside the source image, we need to clip.
160 gl::Rectangle clippedRectangle;
Jamie Madill58675012018-05-22 14:54:07 -0400161 const gl::Extents readExtents = framebufferVk->getReadImageExtents();
162 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
Luc Ferron2a849bf2018-05-10 13:19:11 -0400163 &clippedRectangle))
164 {
165 // Empty source area, nothing to do.
166 return gl::NoError();
167 }
168
169 // 1- obtain a buffer handle to copy to
Luc Ferron018709f2018-05-10 13:53:11 -0400170 RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400171
172 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
173 const angle::Format &storageFormat = vkFormat.textureFormat();
Luc Ferron018709f2018-05-10 13:53:11 -0400174 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);
Luc Ferron2a849bf2018-05-10 13:19:11 -0400175
176 size_t outputRowPitch = storageFormat.pixelBytes * clippedRectangle.width;
177 size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;
178
179 VkBuffer bufferHandle = VK_NULL_HANDLE;
180
181 uint8_t *stagingPointer = nullptr;
182 bool newBufferAllocated = false;
183 uint32_t stagingOffset = 0;
Luc Ferron018709f2018-05-10 13:53:11 -0400184
185 // The destination is only one layer deep.
186 size_t allocationSize = outputDepthPitch;
Luc Ferron2a849bf2018-05-10 13:19:11 -0400187 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
188 &stagingOffset, &newBufferAllocated);
189
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400190 PackPixelsParams params;
191 params.area = sourceArea;
192 params.format = formatInfo.internalFormat;
193 params.type = formatInfo.type;
194 params.outputPitch = static_cast<GLuint>(outputRowPitch);
195 params.packBuffer = nullptr;
196 params.pack = gl::PixelPackState();
197
Luc Ferron018709f2018-05-10 13:53:11 -0400198 // 2- copy the source image region to the pixel buffer using a cpu readback
199 if (loadFunction.requiresConversion)
200 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400201 // When a conversion is required, we need to use the loadFunction to read from a temporary
202 // buffer instead so its an even slower path.
203 size_t bufferSize = storageFormat.pixelBytes * sourceArea.width * sourceArea.height;
204 angle::MemoryBuffer *memoryBuffer = nullptr;
205 ANGLE_TRY(context->getScratchBuffer(bufferSize, &memoryBuffer));
206
207 // Read into the scratch buffer
Jamie Madill58675012018-05-22 14:54:07 -0400208 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, memoryBuffer->data()));
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400209
210 // Load from scratch buffer to our pixel buffer
211 loadFunction.loadFunction(sourceArea.width, sourceArea.height, 1, memoryBuffer->data(),
212 outputRowPitch, 0, stagingPointer, outputRowPitch, 0);
Luc Ferron018709f2018-05-10 13:53:11 -0400213 }
214 else
215 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400216 // We read directly from the framebuffer into our pixel buffer.
Jamie Madill58675012018-05-22 14:54:07 -0400217 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, stagingPointer));
Luc Ferron018709f2018-05-10 13:53:11 -0400218 }
Luc Ferron2a849bf2018-05-10 13:19:11 -0400219
Luc Ferron018709f2018-05-10 13:53:11 -0400220 // 3- enqueue the destination image subresource update
Luc Ferron2a849bf2018-05-10 13:19:11 -0400221 VkBufferImageCopy copyToImage;
222 copyToImage.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
Luc Ferron018709f2018-05-10 13:53:11 -0400223 copyToImage.bufferRowLength = 0; // Tightly packed data can be specified as 0.
Luc Ferron2a849bf2018-05-10 13:19:11 -0400224 copyToImage.bufferImageHeight = clippedRectangle.height;
225 copyToImage.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
226 copyToImage.imageSubresource.mipLevel = index.getLevelIndex();
227 copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
228 copyToImage.imageSubresource.layerCount = index.getLayerCount();
229 gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
230 gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);
231
232 // 3- enqueue the destination image subresource update
233 mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
234 return gl::NoError();
235}
236
// Replays all queued buffer->image copy regions into |image| on
// |commandBuffer|, then clears the queue and recycles retained staging memory.
vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
                                           vk::ImageHelper *image,
                                           vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    // Make the staged CPU writes visible before the GPU copies from them.
    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);

        // Conservatively flush all writes to the image. We could use a more restricted barrier.
        // Do not move this above the for loop, otherwise multiple updates can have race conditions
        // and not be applied correctly as seen in:
        // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
        image->changeLayoutWithStages(
            VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    mSubresourceUpdates.clear();
    mStagingBuffer.releaseRetainedBuffers(renderer);

    return vk::NoError();
}
269
// True when no subresource updates are pending.
bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}
274
// A default-constructed update carries no buffer.
PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
{
}

// Pairs a staging buffer handle with the copy region to apply from it.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400286
Jamie Madill26084d02018-04-09 13:44:04 -0400287// TextureVk implementation.
// The render target aliases this texture's image and base-level view so the
// texture can be used as a framebuffer attachment.
TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
    : TextureImpl(state), mRenderTarget(&mImage, &mBaseLevelImageView, this), mPixelBuffer(renderer)
{
}

// Resource teardown happens in onDestroy(), which has access to the context.
TextureVk::~TextureVk()
{
}
296
Jamie Madill035fd6b2017-10-03 15:43:22 -0400297gl::Error TextureVk::onDestroy(const gl::Context *context)
298{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400299 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400300 RendererVk *renderer = contextVk->getRenderer();
301
Jamie Madillc4f27e42018-03-31 14:19:18 -0400302 releaseImage(context, renderer);
Jamie Madille88ec8e2017-10-31 17:18:14 -0400303 renderer->releaseResource(*this, &mSampler);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400304
Jamie Madilla7be1f72018-04-13 15:16:26 -0400305 mPixelBuffer.release(renderer);
Jamie Madill26084d02018-04-09 13:44:04 -0400306
Jamie Madill035fd6b2017-10-03 15:43:22 -0400307 return gl::NoError();
308}
309
// Defines (or redefines) a texture level. Releases the backing image when the
// new definition is incompatible, and stages the initial pixel data (if any)
// for upload on the next flush.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);

        // Calculate the expected size for the index we are defining. If the size is different from
        // the given size, or the format is different, we are redefining the image so we must
        // release it.
        if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    onResourceChanged(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
356
// Stages a sub-rectangle update; the data is uploaded to the image on the
// next flush of the pixel buffer.
gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk                 = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    onResourceChanged(contextVk->getRenderer());

    return gl::NoError();
}
376
// Compressed texture uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
400
// glCopyTexImage2D: redefines the level to the source rectangle's size (no
// initial data), then stages the framebuffer copy via copySubImageImpl.
gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
    const gl::InternalFormat &internalFormatInfo =
        gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
    ANGLE_TRY(setImage(context, index, internalFormat, newImageSize, internalFormatInfo.format,
                       internalFormatInfo.type, gl::PixelUnpackState(), nullptr));
    return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
                            source);
}
415
// glCopyTexSubImage2D: copies into the existing image, keeping the texture's
// current base-level format.
gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
    return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
}
425
426gl::Error TextureVk::copySubImageImpl(const gl::Context *context,
427 const gl::ImageIndex &index,
428 const gl::Offset &destOffset,
429 const gl::Rectangle &sourceArea,
430 const gl::InternalFormat &internalFormat,
431 gl::Framebuffer *source)
432{
Luc Ferron018709f2018-05-10 13:53:11 -0400433 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
434 gl::Rectangle clippedSourceArea;
435 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
436 &clippedSourceArea))
437 {
438 return gl::NoError();
439 }
440
441 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
442 destOffset.y + sourceArea.y - sourceArea.y, 0);
443
444 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill316c6062018-05-29 10:49:45 -0400445 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronf299a372018-05-14 14:44:54 -0400446 FramebufferVk *framebufferVk = vk::GetImpl(source);
Luc Ferron018709f2018-05-10 13:53:11 -0400447
448 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
449 // change to the pixel buffer.
450 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
451 // when its supported.
Jamie Madill58675012018-05-22 14:54:07 -0400452 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
Luc Ferron018709f2018-05-10 13:53:11 -0400453 context, index, clippedSourceArea, modifiedDestOffset,
Luc Ferronf299a372018-05-14 14:44:54 -0400454 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
Jamie Madill58675012018-05-22 14:54:07 -0400455 framebufferVk));
Luc Ferron018709f2018-05-10 13:53:11 -0400456
Jamie Madill316c6062018-05-29 10:49:45 -0400457 onResourceChanged(renderer);
458 framebufferVk->addReadDependency(this);
Luc Ferron018709f2018-05-10 13:53:11 -0400459 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400460}
461
// Begins recording write commands for this texture, refreshing its queue
// serial so later work observes the correct ordering.
vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
                                              vk::CommandBuffer **commandBufferOut)
{
    updateQueueSerial(renderer->getCurrentQueueSerial());
    ANGLE_TRY(beginWriteResource(renderer, commandBufferOut));
    return vk::NoError();
}
469
Jamie Madillc564c072017-06-01 12:45:42 -0400470gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500471 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400472 size_t levels,
473 GLenum internalFormat,
474 const gl::Extents &size)
475{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400476 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
477 RendererVk *renderer = contextVk->getRenderer();
478 const vk::Format &format = renderer->getFormat(internalFormat);
479 vk::CommandBuffer *commandBuffer = nullptr;
480 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
481 ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
482 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400483}
484
// EGL image and external stream textures are not implemented in the Vulkan
// backend yet.
gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
501
// Mipmap generation, base-level selection and pbuffer binding are not
// implemented in the Vulkan backend yet.
gl::Error TextureVk::generateMipmap(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
525
// Returns the render target wrapper so this texture can back a framebuffer
// attachment; the image is created and staged updates flushed first.
gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // The image must exist before rendering to it.
    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}
545
// Lazily creates the backing image (sized from the base level description)
// and flushes any staged pixel updates into it.
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    if (mImage.valid() && mPixelBuffer.empty())
    {
        // Image exists and nothing is staged: nothing to do.
        return vk::NoError();
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    if (!mImage.valid())
    {
        const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
        const gl::Extents &extents = baseLevelDesc.size;
        const uint32_t levelCount  = getLevelCount();

        ANGLE_TRY(initImage(renderer, format, extents, levelCount, commandBuffer));
    }

    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer));
    return vk::NoError();
}
570
// Rebuilds the VkSampler whenever sampler-affecting texture state is dirty
// (or no sampler exists yet). Most creation parameters are forced to basic
// defaults; only filtering, wrap modes and LOD range come from GL state.
gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    if (dirtyBits.none() && mSampler.valid())
    {
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        RendererVk *renderer = contextVk->getRenderer();
        // Hand the old sampler to the renderer rather than destroying it here;
        // presumably destruction is deferred until the GPU is done — see
        // RendererVk::releaseResource.
        renderer->releaseResource(*this, &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext                   = nullptr;
    samplerInfo.flags                   = 0;
    samplerInfo.magFilter               = gl_vk::GetFilter(samplerState.magFilter);
    samplerInfo.minFilter               = gl_vk::GetFilter(samplerState.minFilter);
    samplerInfo.mipmapMode              = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
    samplerInfo.addressModeU            = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
    samplerInfo.addressModeV            = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
    samplerInfo.addressModeW            = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
    samplerInfo.mipLodBias              = 0.0f;
    samplerInfo.anisotropyEnable        = VK_FALSE;
    samplerInfo.maxAnisotropy           = 1.0f;
    samplerInfo.compareEnable           = VK_FALSE;
    samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod                  = samplerState.minLod;
    samplerInfo.maxLod                  = samplerState.maxLod;
    samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
    return gl::NoError();
}
611
// Multisample texture storage is not yet implemented in the Vulkan back-end;
// any call reaches UNIMPLEMENTED() and reports an internal error.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
622
// Stub for robust resource initialization: should initialize the contents of
// the image at |imageIndex|, but is currently a no-op that reports success.
// NOTE(review): returning NoError here means callers see the texture as
// initialized even though nothing was written — presumably acceptable until
// robust init is wired up; confirm against the robust-init tests.
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
629
// Returns the backing Vulkan image helper. Only valid to call after the
// image has been initialized (see initImage); asserted below.
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
635
636const vk::ImageView &TextureVk::getImageView() const
637{
Jamie Madill93edca12018-03-30 10:43:18 -0400638 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400639
640 const GLenum minFilter = mState.getSamplerState().minFilter;
641 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
642 {
643 return mBaseLevelImageView;
644 }
645
646 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400647}
648
// Returns the Vulkan sampler created in syncState(). Only valid once
// syncState has run successfully; asserted below.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
654
Luc Ferronfa7503c2018-05-08 11:25:06 -0400655vk::Error TextureVk::initImage(RendererVk *renderer,
656 const vk::Format &format,
657 const gl::Extents &extents,
658 const uint32_t levelCount,
659 vk::CommandBuffer *commandBuffer)
660{
661 const VkDevice device = renderer->getDevice();
662
663 const VkImageUsageFlags usage =
664 (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
665 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);
666
667 ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));
668
669 const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
670
671 ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));
672
673 gl::SwizzleState mappedSwizzle;
674 MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);
675
676 // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
677 ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
678 mappedSwizzle, &mMipmapImageView, levelCount));
679 ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
680 mappedSwizzle, &mBaseLevelImageView, 1));
681
682 // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
Luc Ferron7348fc52018-05-09 07:17:16 -0400683 VkClearColorValue black = {{0, 0, 0, 1.0f}};
Luc Ferronfa7503c2018-05-08 11:25:06 -0400684 mImage.clearColor(black, commandBuffer);
685 return vk::NoError();
686}
687
// Queues the image and both of its views for deferred destruction. They may
// still be referenced by in-flight command buffers, so they are released
// against the current queue serial rather than destroyed immediately.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseResource(*this, &mBaseLevelImageView);
    renderer->releaseResource(*this, &mMipmapImageView);
    // Notify observers (e.g. attached framebuffers) that this texture's
    // backing storage changed.
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
695
Luc Ferron66410532018-04-20 12:47:45 -0400696uint32_t TextureVk::getLevelCount() const
697{
698 ASSERT(mState.getEffectiveBaseLevel() == 0);
699
700 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
701 return mState.getMipmapMaxLevel() + 1;
702}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400703} // namespace rx