blob: 3c98dd36f0dc760c2f24ea8b2ef7610402cbb324 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Luc Ferronc5181702018-05-17 09:44:42 -040013#include "image_util/generatemip.inl"
Jamie Madill035fd6b2017-10-03 15:43:22 -040014#include "libANGLE/Context.h"
15#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040016#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040017#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050018#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
Luc Ferron5164b792018-03-06 09:10:12 -050022namespace
23{
Jamie Madill93edca12018-03-30 10:43:18 -040024void MapSwizzleState(GLenum internalFormat,
25 const gl::SwizzleState &swizzleState,
26 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050027{
28 switch (internalFormat)
29 {
Jamie Madill26084d02018-04-09 13:44:04 -040030 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040031 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
34 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050035 break;
Jamie Madill26084d02018-04-09 13:44:04 -040036 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040037 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
40 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050041 break;
Jamie Madill26084d02018-04-09 13:44:04 -040042 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040043 swizzleStateOut->swizzleRed = GL_ZERO;
44 swizzleStateOut->swizzleGreen = GL_ZERO;
45 swizzleStateOut->swizzleBlue = GL_ZERO;
46 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040047 break;
Luc Ferron5164b792018-03-06 09:10:12 -050048 default:
Jamie Madill93edca12018-03-30 10:43:18 -040049 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050050 break;
51 }
52}
Jamie Madill26084d02018-04-09 13:44:04 -040053
54constexpr VkBufferUsageFlags kStagingBufferFlags =
55 (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
56constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron05cd6df2018-05-24 15:51:29 -040057
58constexpr VkFormatFeatureFlags kBlitFeatureFlags =
59 VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
Luc Ferron5164b792018-03-06 09:10:12 -050060} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040061
// PixelBuffer implementation.
// Holds CPU-staged texel data and the list of pending buffer-to-image copies.
PixelBuffer::PixelBuffer(RendererVk *renderer)
    : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4, renderer);
}
70
PixelBuffer::~PixelBuffer()
{
}

// Returns the staging buffer's GPU resources to the renderer. Must be called before
// destruction (see TextureVk::onDestroy).
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
79
Luc Ferron2f3f4142018-05-30 08:27:19 -040080void PixelBuffer::removeStagedUpdates(const gl::ImageIndex &index)
81{
82 // Find any staged updates for this index and removes them from the pending list.
83 uint32_t levelIndex = static_cast<uint32_t>(index.getLevelIndex());
84 uint32_t layerIndex = static_cast<uint32_t>(index.getLayerIndex());
85 auto removeIfStatement = [levelIndex, layerIndex](SubresourceUpdate &update) {
86 return update.copyRegion.imageSubresource.mipLevel == levelIndex &&
87 update.copyRegion.imageSubresource.baseArrayLayer == layerIndex;
88 };
89 mSubresourceUpdates.erase(
90 std::remove_if(mSubresourceUpdates.begin(), mSubresourceUpdates.end(), removeIfStatement),
91 mSubresourceUpdates.end());
92}
93
// Stages a CPU-side texel upload: converts |pixels| (laid out per |unpack|) into the
// image's storage format inside the staging buffer and queues a VkBufferImageCopy that
// flushUpdatesToImage() will record later.
vk::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                              const gl::ImageIndex &index,
                                              const gl::Extents &extents,
                                              const gl::Offset &offset,
                                              const gl::InternalFormat &formatInfo,
                                              const gl::PixelUnpackState &unpack,
                                              GLenum type,
                                              const uint8_t *pixels)
{
    // Compute the layout of the caller-provided pixel data from the unpack state,
    // guarding each computation against integer overflow.
    GLuint inputRowPitch = 0;
    ANGLE_TRY_VK_CHECKED_MATH(formatInfo.computeRowPitch(type, extents.width, unpack.alignment,
                                                         unpack.rowLength, &inputRowPitch));

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_VK_CHECKED_MATH(formatInfo.computeDepthPitch(extents.height, unpack.imageHeight,
                                                           inputRowPitch, &inputDepthPitch));

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_VK_CHECKED_MATH(formatInfo.computeSkipBytes(
        type, inputRowPitch, inputDepthPitch, unpack, applySkipImages, &inputSkipBytes));

    RendererVk *renderer = contextVk->getRenderer();

    // The staged copy is tightly packed in the texture's actual (Vulkan) storage format.
    const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    size_t outputRowPitch = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset = 0;
    size_t allocationSize = outputDepthPitch * extents.depth;
    ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    // Skip bytes account for unpack skip rows/pixels/images.
    const uint8_t *source = pixels + inputSkipBytes;

    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    // Convert (or memcpy, for matching formats) the source pixels into staging memory.
    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    VkBufferImageCopy copy;

    copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength = extents.width;
    copy.bufferImageHeight = extents.height;
    copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return vk::NoError();
}
159
Jamie Madilleebe2192018-07-11 09:01:18 -0400160vk::Error PixelBuffer::stageSubresourceUpdateFromFramebuffer(const gl::Context *context,
Jamie Madill58675012018-05-22 14:54:07 -0400161 const gl::ImageIndex &index,
162 const gl::Rectangle &sourceArea,
163 const gl::Offset &dstOffset,
164 const gl::Extents &dstExtent,
165 const gl::InternalFormat &formatInfo,
166 FramebufferVk *framebufferVk)
Luc Ferron2a849bf2018-05-10 13:19:11 -0400167{
Luc Ferronaa2126c2018-07-09 15:36:36 -0400168 ContextVk *contextVk = vk::GetImpl(context);
169
Luc Ferron2a849bf2018-05-10 13:19:11 -0400170 // If the extents and offset is outside the source image, we need to clip.
171 gl::Rectangle clippedRectangle;
Jamie Madill58675012018-05-22 14:54:07 -0400172 const gl::Extents readExtents = framebufferVk->getReadImageExtents();
173 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
Luc Ferron2a849bf2018-05-10 13:19:11 -0400174 &clippedRectangle))
175 {
176 // Empty source area, nothing to do.
Jamie Madilleebe2192018-07-11 09:01:18 -0400177 return vk::NoError();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400178 }
179
Luc Ferronaa2126c2018-07-09 15:36:36 -0400180 bool isViewportFlipEnabled = contextVk->isViewportFlipEnabledForDrawFBO();
181 if (isViewportFlipEnabled)
182 {
183 clippedRectangle.y = readExtents.height - clippedRectangle.y - clippedRectangle.height;
184 }
185
Luc Ferron2a849bf2018-05-10 13:19:11 -0400186 // 1- obtain a buffer handle to copy to
Luc Ferron018709f2018-05-10 13:53:11 -0400187 RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400188
189 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
190 const angle::Format &storageFormat = vkFormat.textureFormat();
Luc Ferron018709f2018-05-10 13:53:11 -0400191 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);
Luc Ferron2a849bf2018-05-10 13:19:11 -0400192
193 size_t outputRowPitch = storageFormat.pixelBytes * clippedRectangle.width;
194 size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;
195
196 VkBuffer bufferHandle = VK_NULL_HANDLE;
197
198 uint8_t *stagingPointer = nullptr;
199 bool newBufferAllocated = false;
200 uint32_t stagingOffset = 0;
Luc Ferron018709f2018-05-10 13:53:11 -0400201
202 // The destination is only one layer deep.
203 size_t allocationSize = outputDepthPitch;
Jamie Madilleebe2192018-07-11 09:01:18 -0400204 ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
205 &stagingOffset, &newBufferAllocated));
Luc Ferron2a849bf2018-05-10 13:19:11 -0400206
Luc Ferronaa2126c2018-07-09 15:36:36 -0400207 gl::PixelPackState pixelPackState = gl::PixelPackState();
208 // TODO(lucferron): The pixel pack state alignment should probably be 1 instead of 4.
209 // http://anglebug.com/2718
210
211 if (isViewportFlipEnabled)
212 {
213 pixelPackState.reverseRowOrder = !pixelPackState.reverseRowOrder;
214 }
215
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400216 PackPixelsParams params;
Luc Ferronaa2126c2018-07-09 15:36:36 -0400217 params.area = clippedRectangle;
Luc Ferronc94ba1d2018-06-18 11:26:28 -0400218 params.format = formatInfo.format;
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400219 params.type = formatInfo.type;
220 params.outputPitch = static_cast<GLuint>(outputRowPitch);
221 params.packBuffer = nullptr;
Luc Ferronaa2126c2018-07-09 15:36:36 -0400222 params.pack = pixelPackState;
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400223
Luc Ferron018709f2018-05-10 13:53:11 -0400224 // 2- copy the source image region to the pixel buffer using a cpu readback
225 if (loadFunction.requiresConversion)
226 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400227 // When a conversion is required, we need to use the loadFunction to read from a temporary
228 // buffer instead so its an even slower path.
Luc Ferronaa2126c2018-07-09 15:36:36 -0400229 size_t bufferSize =
230 storageFormat.pixelBytes * clippedRectangle.width * clippedRectangle.height;
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400231 angle::MemoryBuffer *memoryBuffer = nullptr;
Jamie Madilleebe2192018-07-11 09:01:18 -0400232 ANGLE_TRY_VK_ALLOCATION(context->getScratchBuffer(bufferSize, &memoryBuffer));
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400233
234 // Read into the scratch buffer
Luc Ferron1617e692018-07-11 11:08:19 -0400235 ANGLE_TRY(framebufferVk->readPixelsImpl(
236 context, clippedRectangle, params, VK_IMAGE_ASPECT_COLOR_BIT,
237 framebufferVk->getColorReadRenderTarget(), memoryBuffer->data()));
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400238
239 // Load from scratch buffer to our pixel buffer
Luc Ferronaa2126c2018-07-09 15:36:36 -0400240 loadFunction.loadFunction(clippedRectangle.width, clippedRectangle.height, 1,
241 memoryBuffer->data(), outputRowPitch, 0, stagingPointer,
242 outputRowPitch, 0);
Luc Ferron018709f2018-05-10 13:53:11 -0400243 }
244 else
245 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400246 // We read directly from the framebuffer into our pixel buffer.
Luc Ferron1617e692018-07-11 11:08:19 -0400247 ANGLE_TRY(framebufferVk->readPixelsImpl(
248 context, clippedRectangle, params, VK_IMAGE_ASPECT_COLOR_BIT,
249 framebufferVk->getColorReadRenderTarget(), stagingPointer));
Luc Ferron018709f2018-05-10 13:53:11 -0400250 }
Luc Ferron2a849bf2018-05-10 13:19:11 -0400251
Luc Ferron018709f2018-05-10 13:53:11 -0400252 // 3- enqueue the destination image subresource update
Luc Ferron2a849bf2018-05-10 13:19:11 -0400253 VkBufferImageCopy copyToImage;
254 copyToImage.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
Luc Ferron018709f2018-05-10 13:53:11 -0400255 copyToImage.bufferRowLength = 0; // Tightly packed data can be specified as 0.
Luc Ferron2a849bf2018-05-10 13:19:11 -0400256 copyToImage.bufferImageHeight = clippedRectangle.height;
257 copyToImage.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
258 copyToImage.imageSubresource.mipLevel = index.getLevelIndex();
259 copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
260 copyToImage.imageSubresource.layerCount = index.getLayerCount();
261 gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
262 gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);
263
264 // 3- enqueue the destination image subresource update
265 mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
Jamie Madilleebe2192018-07-11 09:01:18 -0400266 return vk::NoError();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400267}
268
// Thin passthrough to the staging buffer allocator. Hands back a mapped pointer,
// the owning VkBuffer handle, and the offset of the new region; |newBufferAllocatedOut|
// reports whether a fresh buffer had to be created.
vk::Error PixelBuffer::allocate(RendererVk *renderer,
                                size_t sizeInBytes,
                                uint8_t **ptrOut,
                                VkBuffer *handleOut,
                                uint32_t *offsetOut,
                                bool *newBufferAllocatedOut)
{
    return mStagingBuffer.allocate(renderer, sizeInBytes, ptrOut, handleOut, offsetOut,
                                   newBufferAllocatedOut);
}
279
Jamie Madilla7be1f72018-04-13 15:16:26 -0400280vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
Luc Ferron2f3f4142018-05-30 08:27:19 -0400281 uint32_t levelCount,
Jamie Madilla7be1f72018-04-13 15:16:26 -0400282 vk::ImageHelper *image,
283 vk::CommandBuffer *commandBuffer)
Jamie Madill26084d02018-04-09 13:44:04 -0400284{
Jamie Madill20fa8d52018-04-15 10:09:32 -0400285 if (mSubresourceUpdates.empty())
Jamie Madill26084d02018-04-09 13:44:04 -0400286 {
Jamie Madill20fa8d52018-04-15 10:09:32 -0400287 return vk::NoError();
Jamie Madill26084d02018-04-09 13:44:04 -0400288 }
289
Jamie Madill20fa8d52018-04-15 10:09:32 -0400290 ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));
291
Luc Ferron2f3f4142018-05-30 08:27:19 -0400292 std::vector<SubresourceUpdate> updatesToKeep;
293
Jamie Madill20fa8d52018-04-15 10:09:32 -0400294 for (const SubresourceUpdate &update : mSubresourceUpdates)
295 {
296 ASSERT(update.bufferHandle != VK_NULL_HANDLE);
Luc Ferron1a186b12018-04-24 15:25:35 -0400297
Luc Ferron2f3f4142018-05-30 08:27:19 -0400298 const uint32_t updateMipLevel = update.copyRegion.imageSubresource.mipLevel;
299 // It's possible we've accumulated updates that are no longer applicable if the image has
300 // never been flushed but the image description has changed. Check if this level exist for
301 // this image.
302 if (updateMipLevel >= levelCount)
303 {
304 updatesToKeep.emplace_back(update);
305 continue;
306 }
307
Luc Ferron1a186b12018-04-24 15:25:35 -0400308 // Conservatively flush all writes to the image. We could use a more restricted barrier.
309 // Do not move this above the for loop, otherwise multiple updates can have race conditions
310 // and not be applied correctly as seen i:
311 // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
312 image->changeLayoutWithStages(
313 VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
314 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);
315
Jamie Madill20fa8d52018-04-15 10:09:32 -0400316 commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
317 image->getCurrentLayout(), 1, &update.copyRegion);
318 }
319
Luc Ferron2f3f4142018-05-30 08:27:19 -0400320 // Only remove the updates that were actually applied to the image.
321 mSubresourceUpdates = std::move(updatesToKeep);
322
323 if (mSubresourceUpdates.empty())
324 {
325 mStagingBuffer.releaseRetainedBuffers(renderer);
326 }
327 else
328 {
329 WARN() << "Internal Vulkan bufffer could not be released. This is likely due to having "
330 "extra images defined in the Texture.";
331 }
Jamie Madill20fa8d52018-04-15 10:09:32 -0400332
Jamie Madill26084d02018-04-09 13:44:04 -0400333 return vk::NoError();
334}
335
// Returns true when no subresource updates are pending.
bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}
340
// Allocates |allocationSize| bytes of staging memory, queues a copy of it into the
// given image subresource, and returns the mapped pointer through |destData| so the
// caller can fill the region directly (used by CPU mipmap generation).
vk::Error PixelBuffer::stageSubresourceUpdateAndGetData(RendererVk *renderer,
                                                        size_t allocationSize,
                                                        const gl::ImageIndex &imageIndex,
                                                        const gl::Extents &extents,
                                                        const gl::Offset &offset,
                                                        uint8_t **destData)
{
    VkBuffer bufferHandle;
    uint32_t stagingOffset = 0;
    bool newBufferAllocated = false;
    ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, destData, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    VkBufferImageCopy copy;
    copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength = extents.width;
    copy.bufferImageHeight = extents.height;
    copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel = imageIndex.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = imageIndex.hasLayer() ? imageIndex.getLayerIndex() : 0;
    copy.imageSubresource.layerCount = imageIndex.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return vk::NoError();
}
370
// Generates mip levels [firstMipLevel, maxMipLevel] for |layer| on the CPU. Each level
// is downsampled from the previous one via the format's mipGenerationFunction, written
// into freshly staged buffer memory, and queued as a subresource update. |sourceData|
// holds the level preceding |firstMipLevel| with the given dimensions and row pitch.
vk::Error TextureVk::generateMipmapLevelsWithCPU(ContextVk *contextVk,
                                                 const angle::Format &sourceFormat,
                                                 GLuint layer,
                                                 GLuint firstMipLevel,
                                                 GLuint maxMipLevel,
                                                 const size_t sourceWidth,
                                                 const size_t sourceHeight,
                                                 const size_t sourceRowPitch,
                                                 uint8_t *sourceData)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Loop-carried state: the most recently generated level, which feeds the next one.
    size_t previousLevelWidth = sourceWidth;
    size_t previousLevelHeight = sourceHeight;
    uint8_t *previousLevelData = sourceData;
    size_t previousLevelRowPitch = sourceRowPitch;

    for (GLuint currentMipLevel = firstMipLevel; currentMipLevel <= maxMipLevel; currentMipLevel++)
    {
        // Compute next level width and height (each dimension halves, clamped at 1).
        size_t mipWidth = std::max<size_t>(1, previousLevelWidth >> 1);
        size_t mipHeight = std::max<size_t>(1, previousLevelHeight >> 1);

        // With the width and height of the next mip, we can allocate the next buffer we need.
        uint8_t *destData = nullptr;
        size_t destRowPitch = mipWidth * sourceFormat.pixelBytes;

        size_t mipAllocationSize = destRowPitch * mipHeight;
        gl::Extents mipLevelExtents(static_cast<int>(mipWidth), static_cast<int>(mipHeight), 1);

        // Stage the destination region and get a direct pointer into staging memory.
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
            renderer, mipAllocationSize,
            gl::ImageIndex::MakeFromType(mState.getType(), currentMipLevel, layer), mipLevelExtents,
            gl::Offset(), &destData));

        // Generate the mipmap into that new buffer.
        sourceFormat.mipGenerationFunction(previousLevelWidth, previousLevelHeight, 1,
                                           previousLevelData, previousLevelRowPitch, 0, destData,
                                           destRowPitch, 0);

        // Swap for the next iteration. The staged pointer stays valid because the staging
        // buffer retains its allocations until flushed.
        previousLevelWidth = mipWidth;
        previousLevelHeight = mipHeight;
        previousLevelData = destData;
        previousLevelRowPitch = destRowPitch;
    }

    return vk::NoError();
}
420
// A pending buffer-to-image copy: the staging buffer it reads from plus the copy region.
PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400432
// TextureVk implementation.
// The render target wraps this texture's image and base-level view so the texture can
// be used as a framebuffer attachment.
TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
    : TextureImpl(state), mRenderTarget(&mImage, &mBaseLevelImageView, this), mPixelBuffer(renderer)
{
}

TextureVk::~TextureVk()
{
}
442
// Releases all Vulkan resources owned by this texture: the image (and its views), the
// sampler (deferred until its last queue serial completes), and the staging buffer.
gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mSampler);

    mPixelBuffer.release(renderer);
    return gl::NoError();
}
454
// Defines (or redefines) one image level. Any previously staged updates for this index
// are discarded, the backing image is released if the new size/format no longer matches,
// and the initial |pixels| (if any) are staged for upload on the next flush.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // If there are any staged changes for this index, we can remove them since we're going to
    // override them with this call.
    mPixelBuffer.removeStagedUpdates(index);

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);

        // Calculate the expected size for the index we are defining. If the size is different from
        // the given size, or the format is different, we are redefining the image so we must
        // release it.
        if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    onResourceChanged(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
505
// Stages a partial update of an existing image level; the copy itself happens at the
// next flush of the pixel buffer.
gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    onResourceChanged(contextVk->getRenderer());

    return gl::NoError();
}
525
// Compressed texture uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

// Compressed texture sub-image uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
549
// glCopyTexImage2D: redefines the level to the source area's size (with no initial
// data), then performs the copy via copySubImageImpl.
gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
    const gl::InternalFormat &internalFormatInfo =
        gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
    ANGLE_TRY(setImage(context, index, internalFormat, newImageSize, internalFormatInfo.format,
                       internalFormatInfo.type, gl::PixelUnpackState(), nullptr));
    return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
                            source);
}
564
// glCopyTexSubImage2D: copies into the already-defined level using the texture's
// current base-level format.
gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
    return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
}
574
575gl::Error TextureVk::copySubImageImpl(const gl::Context *context,
576 const gl::ImageIndex &index,
577 const gl::Offset &destOffset,
578 const gl::Rectangle &sourceArea,
579 const gl::InternalFormat &internalFormat,
580 gl::Framebuffer *source)
581{
Luc Ferron018709f2018-05-10 13:53:11 -0400582 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
583 gl::Rectangle clippedSourceArea;
584 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
585 &clippedSourceArea))
586 {
587 return gl::NoError();
588 }
589
590 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
591 destOffset.y + sourceArea.y - sourceArea.y, 0);
592
593 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill316c6062018-05-29 10:49:45 -0400594 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronf299a372018-05-14 14:44:54 -0400595 FramebufferVk *framebufferVk = vk::GetImpl(source);
Luc Ferron018709f2018-05-10 13:53:11 -0400596
597 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
598 // change to the pixel buffer.
599 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
600 // when its supported.
Jamie Madill58675012018-05-22 14:54:07 -0400601 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
Luc Ferron018709f2018-05-10 13:53:11 -0400602 context, index, clippedSourceArea, modifiedDestOffset,
Luc Ferronf299a372018-05-14 14:44:54 -0400603 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
Jamie Madill58675012018-05-22 14:54:07 -0400604 framebufferVk));
Luc Ferron018709f2018-05-10 13:53:11 -0400605
Jamie Madill316c6062018-05-29 10:49:45 -0400606 onResourceChanged(renderer);
607 framebufferVk->addReadDependency(this);
Luc Ferron018709f2018-05-10 13:53:11 -0400608 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400609}
610
Luc Ferronfa7503c2018-05-08 11:25:06 -0400611vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
Jamie Madill316c6062018-05-29 10:49:45 -0400612 vk::CommandBuffer **commandBufferOut)
Luc Ferronfa7503c2018-05-08 11:25:06 -0400613{
Luc Ferronc5181702018-05-17 09:44:42 -0400614 ANGLE_TRY(appendWriteResource(renderer, commandBufferOut));
Luc Ferronfa7503c2018-05-08 11:25:06 -0400615 return vk::NoError();
616}
617
Jamie Madillc564c072017-06-01 12:45:42 -0400618gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500619 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400620 size_t levels,
621 GLenum internalFormat,
622 const gl::Extents &size)
623{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400624 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
625 RendererVk *renderer = contextVk->getRenderer();
626 const vk::Format &format = renderer->getFormat(internalFormat);
627 vk::CommandBuffer *commandBuffer = nullptr;
628 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Luc Ferronf6e160f2018-06-12 10:13:57 -0400629 ANGLE_TRY(initImage(contextVk, format, size, static_cast<uint32_t>(levels), commandBuffer));
Luc Ferronfa7503c2018-05-08 11:25:06 -0400630 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400631}
632
Corentin Wallez99d492c2018-02-27 15:17:10 -0500633gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
634 gl::TextureType type,
635 egl::Image *image)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400636{
637 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500638 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400639}
640
Jamie Madill4928b7c2017-06-20 12:57:39 -0400641gl::Error TextureVk::setImageExternal(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500642 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400643 egl::Stream *stream,
644 const egl::Stream::GLTextureDescription &desc)
645{
646 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500647 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400648}
649
Jamie Madilleebe2192018-07-11 09:01:18 -0400650vk::Error TextureVk::generateMipmapWithBlit(RendererVk *renderer)
Luc Ferron05cd6df2018-05-24 15:51:29 -0400651{
652 uint32_t imageLayerCount = GetImageLayerCount(mState.getType());
653 const gl::Extents baseLevelExtents = mImage.getExtents();
654 vk::CommandBuffer *commandBuffer = nullptr;
Jamie Madilleebe2192018-07-11 09:01:18 -0400655 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Luc Ferron05cd6df2018-05-24 15:51:29 -0400656
657 // We are able to use blitImage since the image format we are using supports it. This
658 // is a faster way we can generate the mips.
659 int32_t mipWidth = baseLevelExtents.width;
660 int32_t mipHeight = baseLevelExtents.height;
661
662 // Manually manage the image memory barrier because it uses a lot more parameters than our
663 // usual one.
664 VkImageMemoryBarrier barrier;
665 barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
666 barrier.image = mImage.getImage().getHandle();
667 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
668 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
669 barrier.pNext = nullptr;
670 barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
671 barrier.subresourceRange.baseArrayLayer = 0;
672 barrier.subresourceRange.layerCount = imageLayerCount;
673 barrier.subresourceRange.levelCount = 1;
674
675 for (uint32_t mipLevel = 1; mipLevel <= mState.getMipmapMaxLevel(); mipLevel++)
676 {
677 int32_t nextMipWidth = std::max<int32_t>(1, mipWidth >> 1);
678 int32_t nextMipHeight = std::max<int32_t>(1, mipHeight >> 1);
679
680 barrier.subresourceRange.baseMipLevel = mipLevel - 1;
681 barrier.oldLayout = mImage.getCurrentLayout();
682 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
683 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
684 barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
685
686 // We can do it for all layers at once.
687 commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
688 VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);
689
690 VkImageBlit blit = {};
691 blit.srcOffsets[0] = {0, 0, 0};
692 blit.srcOffsets[1] = {mipWidth, mipHeight, 1};
693 blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
694 blit.srcSubresource.mipLevel = mipLevel - 1;
695 blit.srcSubresource.baseArrayLayer = 0;
696 blit.srcSubresource.layerCount = imageLayerCount;
697 blit.dstOffsets[0] = {0, 0, 0};
698 blit.dstOffsets[1] = {nextMipWidth, nextMipHeight, 1};
699 blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
700 blit.dstSubresource.mipLevel = mipLevel;
701 blit.dstSubresource.baseArrayLayer = 0;
702 blit.dstSubresource.layerCount = imageLayerCount;
703
704 mipWidth = nextMipWidth;
705 mipHeight = nextMipHeight;
706
707 commandBuffer->blitImage(mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
708 mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit,
709 VK_FILTER_LINEAR);
710 }
711
712 // Transition the last mip level to the same layout as all the other ones, so we can declare
713 // our whole image layout to be SRC_OPTIMAL.
714 barrier.subresourceRange.baseMipLevel = mState.getMipmapMaxLevel();
715 barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
716 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
717
718 // We can do it for all layers at once.
719 commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
720 VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);
721
722 // This is just changing the internal state of the image helper so that the next call
723 // to changeLayoutWithStages will use this layout as the "oldLayout" argument.
724 mImage.updateLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
Jamie Madilleebe2192018-07-11 09:01:18 -0400725
726 return vk::NoError();
Luc Ferron05cd6df2018-05-24 15:51:29 -0400727}
728
Jamie Madilleebe2192018-07-11 09:01:18 -0400729vk::Error TextureVk::generateMipmapWithCPU(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400730{
Luc Ferron22695bf2018-05-22 15:52:08 -0400731 ContextVk *contextVk = vk::GetImpl(context);
732 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronc5181702018-05-17 09:44:42 -0400733
Luc Ferronc5181702018-05-17 09:44:42 -0400734 bool newBufferAllocated = false;
Luc Ferronc5181702018-05-17 09:44:42 -0400735 const gl::Extents baseLevelExtents = mImage.getExtents();
Luc Ferron05cd6df2018-05-24 15:51:29 -0400736 uint32_t imageLayerCount = GetImageLayerCount(mState.getType());
737 const angle::Format &angleFormat = mImage.getFormat().textureFormat();
Luc Ferronc5181702018-05-17 09:44:42 -0400738 GLuint sourceRowPitch = baseLevelExtents.width * angleFormat.pixelBytes;
739 size_t baseLevelAllocationSize = sourceRowPitch * baseLevelExtents.height;
740
Luc Ferron22695bf2018-05-22 15:52:08 -0400741 vk::CommandBuffer *commandBuffer = nullptr;
Jamie Madilleebe2192018-07-11 09:01:18 -0400742 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Luc Ferronc5181702018-05-17 09:44:42 -0400743
Luc Ferron22695bf2018-05-22 15:52:08 -0400744 // Requirement of the copyImageToBuffer, the source image must be in SRC_OPTIMAL layout.
745 mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
746 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
747 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);
748
749 size_t totalAllocationSize = baseLevelAllocationSize * imageLayerCount;
750
751 VkBuffer copyBufferHandle;
752 uint8_t *baseLevelBuffers;
753 uint32_t copyBaseOffset;
754
755 // Allocate enough memory to copy every level 0 image (one for each layer of the texture).
756 ANGLE_TRY(mPixelBuffer.allocate(renderer, totalAllocationSize, &baseLevelBuffers,
757 &copyBufferHandle, &copyBaseOffset, &newBufferAllocated));
758
759 // Do only one copy for all layers at once.
Luc Ferronc5181702018-05-17 09:44:42 -0400760 VkBufferImageCopy region;
761 region.bufferImageHeight = baseLevelExtents.height;
Luc Ferron22695bf2018-05-22 15:52:08 -0400762 region.bufferOffset = static_cast<VkDeviceSize>(copyBaseOffset);
Luc Ferronc5181702018-05-17 09:44:42 -0400763 region.bufferRowLength = baseLevelExtents.width;
764 region.imageExtent.width = baseLevelExtents.width;
765 region.imageExtent.height = baseLevelExtents.height;
766 region.imageExtent.depth = 1;
767 region.imageOffset.x = 0;
768 region.imageOffset.y = 0;
769 region.imageOffset.z = 0;
770 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
771 region.imageSubresource.baseArrayLayer = 0;
Luc Ferron22695bf2018-05-22 15:52:08 -0400772 region.imageSubresource.layerCount = imageLayerCount;
Luc Ferronc5181702018-05-17 09:44:42 -0400773 region.imageSubresource.mipLevel = mState.getEffectiveBaseLevel();
774
Luc Ferron22695bf2018-05-22 15:52:08 -0400775 commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
776 1, &region);
Luc Ferronc5181702018-05-17 09:44:42 -0400777
778 ANGLE_TRY(renderer->finish(context));
779
Luc Ferron2f3f4142018-05-30 08:27:19 -0400780 const uint32_t levelCount = getLevelCount();
781
Luc Ferronc5181702018-05-17 09:44:42 -0400782 // We now have the base level available to be manipulated in the baseLevelBuffer pointer.
783 // Generate all the missing mipmaps with the slow path. We can optimize with vkCmdBlitImage
784 // later.
Luc Ferron22695bf2018-05-22 15:52:08 -0400785 // For each layer, use the copied data to generate all the mips.
786 for (GLuint layer = 0; layer < imageLayerCount; layer++)
787 {
788 size_t bufferOffset = layer * baseLevelAllocationSize;
Luc Ferron05cd6df2018-05-24 15:51:29 -0400789
790 ANGLE_TRY(generateMipmapLevelsWithCPU(
Luc Ferron22695bf2018-05-22 15:52:08 -0400791 contextVk, angleFormat, layer, mState.getEffectiveBaseLevel() + 1,
792 mState.getMipmapMaxLevel(), baseLevelExtents.width, baseLevelExtents.height,
793 sourceRowPitch, baseLevelBuffers + bufferOffset));
794 }
Luc Ferronc5181702018-05-17 09:44:42 -0400795
Jamie Madilleebe2192018-07-11 09:01:18 -0400796 return mPixelBuffer.flushUpdatesToImage(renderer, levelCount, &mImage, commandBuffer);
Luc Ferron05cd6df2018-05-24 15:51:29 -0400797}
798
799gl::Error TextureVk::generateMipmap(const gl::Context *context)
800{
801 ContextVk *contextVk = vk::GetImpl(context);
Luc Ferron05cd6df2018-05-24 15:51:29 -0400802
803 // Some data is pending, or the image has not been defined at all yet
804 if (!mImage.valid())
805 {
806 // lets initialize the image so we can generate the next levels.
807 if (!mPixelBuffer.empty())
808 {
Luc Ferronf6e160f2018-06-12 10:13:57 -0400809 ANGLE_TRY(ensureImageInitialized(contextVk));
Luc Ferron05cd6df2018-05-24 15:51:29 -0400810 ASSERT(mImage.valid());
811 }
812 else
813 {
814 // There is nothing to generate if there is nothing uploaded so far.
815 return gl::NoError();
816 }
817 }
818
Luc Ferronf6e160f2018-06-12 10:13:57 -0400819 RendererVk *renderer = contextVk->getRenderer();
Luc Ferron05cd6df2018-05-24 15:51:29 -0400820 VkFormatProperties imageProperties;
821 vk::GetFormatProperties(renderer->getPhysicalDevice(), mImage.getFormat().vkTextureFormat,
822 &imageProperties);
823
824 // Check if the image supports blit. If it does, we can do the mipmap generation on the gpu
825 // only.
826 if (IsMaskFlagSet(kBlitFeatureFlags, imageProperties.linearTilingFeatures))
827 {
Jamie Madilleebe2192018-07-11 09:01:18 -0400828 ANGLE_TRY(generateMipmapWithBlit(renderer));
Luc Ferron05cd6df2018-05-24 15:51:29 -0400829 }
830 else
831 {
832 ANGLE_TRY(generateMipmapWithCPU(context));
833 }
834
835 // We're changing this textureVk content, make sure we let the graph know.
836 onResourceChanged(renderer);
837
Luc Ferronc5181702018-05-17 09:44:42 -0400838 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400839}
840
Jamie Madill4928b7c2017-06-20 12:57:39 -0400841gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400842{
843 UNIMPLEMENTED();
Jamie Madill4928b7c2017-06-20 12:57:39 -0400844 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400845}
846
Jamie Madill4928b7c2017-06-20 12:57:39 -0400847gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400848{
849 UNIMPLEMENTED();
Jamie Madill4928b7c2017-06-20 12:57:39 -0400850 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400851}
852
Jamie Madill4928b7c2017-06-20 12:57:39 -0400853gl::Error TextureVk::releaseTexImage(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400854{
855 UNIMPLEMENTED();
Jamie Madill4928b7c2017-06-20 12:57:39 -0400856 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400857}
858
Jamie Madill4928b7c2017-06-20 12:57:39 -0400859gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
860 GLenum binding,
Jamie Madill4fd95d52017-04-05 11:22:18 -0400861 const gl::ImageIndex &imageIndex,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400862 FramebufferAttachmentRenderTarget **rtOut)
863{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400864 // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
Jamie Madillcc129372018-04-12 09:13:18 -0400865 ASSERT(imageIndex.getType() == gl::TextureType::_2D);
Jamie Madill26084d02018-04-09 13:44:04 -0400866
867 // Non-zero mip level attachments are an ES 3.0 feature.
Jamie Madillcc129372018-04-12 09:13:18 -0400868 ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());
Jamie Madill26084d02018-04-09 13:44:04 -0400869
870 ContextVk *contextVk = vk::GetImpl(context);
Luc Ferronf6e160f2018-06-12 10:13:57 -0400871 ANGLE_TRY(ensureImageInitialized(contextVk));
Jamie Madill26084d02018-04-09 13:44:04 -0400872
Jamie Madillb79e7bb2017-10-24 13:55:50 -0400873 *rtOut = &mRenderTarget;
874 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400875}
876
Luc Ferronf6e160f2018-06-12 10:13:57 -0400877vk::Error TextureVk::ensureImageInitialized(ContextVk *contextVk)
Jamie Madill26084d02018-04-09 13:44:04 -0400878{
Luc Ferron10434f62018-04-24 10:06:37 -0400879 if (mImage.valid() && mPixelBuffer.empty())
880 {
881 return vk::NoError();
882 }
Luc Ferronf6e160f2018-06-12 10:13:57 -0400883 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill26084d02018-04-09 13:44:04 -0400884 vk::CommandBuffer *commandBuffer = nullptr;
Luc Ferronfa7503c2018-05-08 11:25:06 -0400885 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400886
Luc Ferron2f3f4142018-05-30 08:27:19 -0400887 const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
888 const gl::Extents &baseLevelExtents = baseLevelDesc.size;
889 const uint32_t levelCount = getLevelCount();
890
Jamie Madill26084d02018-04-09 13:44:04 -0400891 if (!mImage.valid())
892 {
Jamie Madill26084d02018-04-09 13:44:04 -0400893 const vk::Format &format =
894 renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
Jamie Madill26084d02018-04-09 13:44:04 -0400895
Luc Ferronf6e160f2018-06-12 10:13:57 -0400896 ANGLE_TRY(initImage(contextVk, format, baseLevelExtents, levelCount, commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400897 }
898
Luc Ferron2f3f4142018-05-30 08:27:19 -0400899 ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, levelCount, &mImage, commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400900 return vk::NoError();
901}
902
Luc Ferron4bba74f2018-04-19 14:40:45 -0400903gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
Geoff Lang22416862016-06-08 16:14:36 -0700904{
Luc Ferron20610902018-04-19 14:41:13 -0400905 if (dirtyBits.none() && mSampler.valid())
906 {
907 return gl::NoError();
908 }
909
910 ContextVk *contextVk = vk::GetImpl(context);
911 if (mSampler.valid())
912 {
913 RendererVk *renderer = contextVk->getRenderer();
Jamie Madillc57ee252018-05-30 19:53:48 -0400914 renderer->releaseObject(getStoredQueueSerial(), &mSampler);
Luc Ferron20610902018-04-19 14:41:13 -0400915 }
916
917 const gl::SamplerState &samplerState = mState.getSamplerState();
918
919 // Create a simple sampler. Force basic parameter settings.
920 VkSamplerCreateInfo samplerInfo;
921 samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
922 samplerInfo.pNext = nullptr;
923 samplerInfo.flags = 0;
924 samplerInfo.magFilter = gl_vk::GetFilter(samplerState.magFilter);
925 samplerInfo.minFilter = gl_vk::GetFilter(samplerState.minFilter);
Luc Ferron66410532018-04-20 12:47:45 -0400926 samplerInfo.mipmapMode = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
Luc Ferron20610902018-04-19 14:41:13 -0400927 samplerInfo.addressModeU = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
928 samplerInfo.addressModeV = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
929 samplerInfo.addressModeW = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
930 samplerInfo.mipLodBias = 0.0f;
931 samplerInfo.anisotropyEnable = VK_FALSE;
932 samplerInfo.maxAnisotropy = 1.0f;
933 samplerInfo.compareEnable = VK_FALSE;
934 samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
Luc Ferron66410532018-04-20 12:47:45 -0400935 samplerInfo.minLod = samplerState.minLod;
936 samplerInfo.maxLod = samplerState.maxLod;
Luc Ferron20610902018-04-19 14:41:13 -0400937 samplerInfo.borderColor = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
938 samplerInfo.unnormalizedCoordinates = VK_FALSE;
939
940 ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
Luc Ferron4bba74f2018-04-19 14:40:45 -0400941 return gl::NoError();
Geoff Lang22416862016-06-08 16:14:36 -0700942}
943
Jamie Madillc564c072017-06-01 12:45:42 -0400944gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500945 gl::TextureType type,
JiangYizhoubddc46b2016-12-09 09:50:51 +0800946 GLsizei samples,
947 GLint internalformat,
948 const gl::Extents &size,
Geoff Lang92019432017-11-20 13:09:34 -0500949 bool fixedSampleLocations)
JiangYizhoubddc46b2016-12-09 09:50:51 +0800950{
951 UNIMPLEMENTED();
952 return gl::InternalError() << "setStorageMultisample is unimplemented.";
953}
954
Jamie Madill05b35b22017-10-03 09:01:44 -0400955gl::Error TextureVk::initializeContents(const gl::Context *context,
956 const gl::ImageIndex &imageIndex)
957{
958 UNIMPLEMENTED();
959 return gl::NoError();
960}
961
Jamie Madill858c1cc2018-03-31 14:19:13 -0400962const vk::ImageHelper &TextureVk::getImage() const
Jamie Madill5547b382017-10-23 18:16:01 -0400963{
964 ASSERT(mImage.valid());
Jamie Madill858c1cc2018-03-31 14:19:13 -0400965 return mImage;
Jamie Madill5547b382017-10-23 18:16:01 -0400966}
967
968const vk::ImageView &TextureVk::getImageView() const
969{
Jamie Madill93edca12018-03-30 10:43:18 -0400970 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400971
972 const GLenum minFilter = mState.getSamplerState().minFilter;
973 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
974 {
975 return mBaseLevelImageView;
976 }
977
978 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400979}
980
// Accessor for the VkSampler created in syncState; must be valid by now.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
986
Luc Ferronf6e160f2018-06-12 10:13:57 -0400987vk::Error TextureVk::initImage(ContextVk *contextVk,
Luc Ferronfa7503c2018-05-08 11:25:06 -0400988 const vk::Format &format,
989 const gl::Extents &extents,
990 const uint32_t levelCount,
991 vk::CommandBuffer *commandBuffer)
992{
Luc Ferronf6e160f2018-06-12 10:13:57 -0400993 const RendererVk *renderer = contextVk->getRenderer();
Luc Ferronfa7503c2018-05-08 11:25:06 -0400994 const VkDevice device = renderer->getDevice();
995
996 const VkImageUsageFlags usage =
997 (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
998 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);
999
1000 ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));
1001
1002 const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1003
1004 ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));
1005
1006 gl::SwizzleState mappedSwizzle;
1007 MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);
1008
Luc Ferronf6e160f2018-06-12 10:13:57 -04001009 // Renderable textures cannot have a swizzle.
1010 ASSERT(!contextVk->getTextureCaps().get(format.internalFormat).textureAttachment ||
1011 !mappedSwizzle.swizzleRequired());
1012
Luc Ferronfa7503c2018-05-08 11:25:06 -04001013 // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
1014 ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
1015 mappedSwizzle, &mMipmapImageView, levelCount));
1016 ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
1017 mappedSwizzle, &mBaseLevelImageView, 1));
1018
1019 // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
Luc Ferron7348fc52018-05-09 07:17:16 -04001020 VkClearColorValue black = {{0, 0, 0, 1.0f}};
Luc Ferronc20b9502018-05-24 09:30:17 -04001021 mImage.clearColor(black, 0, levelCount, commandBuffer);
Luc Ferronfa7503c2018-05-08 11:25:06 -04001022 return vk::NoError();
1023}
1024
Jamie Madillc4f27e42018-03-31 14:19:18 -04001025void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
1026{
1027 mImage.release(renderer->getCurrentQueueSerial(), renderer);
Jamie Madillc57ee252018-05-30 19:53:48 -04001028 renderer->releaseObject(getStoredQueueSerial(), &mBaseLevelImageView);
1029 renderer->releaseObject(getStoredQueueSerial(), &mMipmapImageView);
Jamie Madillc4f27e42018-03-31 14:19:18 -04001030 onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
1031}
1032
Luc Ferron66410532018-04-20 12:47:45 -04001033uint32_t TextureVk::getLevelCount() const
1034{
1035 ASSERT(mState.getEffectiveBaseLevel() == 0);
1036
1037 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
1038 return mState.getMipmapMaxLevel() + 1;
1039}
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001040} // namespace rx