//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// TextureVk.cpp:
//    Implements the class methods for TextureVk.
//

#include "libANGLE/renderer/vulkan/TextureVk.h"

#include "common/debug.h"
#include "image_util/generatemip.inl"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"

namespace rx
{
namespace
{
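// Map the texture's GL swizzle state onto the swizzle the image view needs. Judging from the
// cases below, luminance and alpha formats are emulated with red/red-green channels, and RGB8
// with an alpha-padded format, so the incoming swizzle is rerouted to the channels that actually
// hold the data and the missing channels are forced to constants.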
void MapSwizzleState(GLenum internalFormat,
                     const gl::SwizzleState &swizzleState,
                     gl::SwizzleState *swizzleStateOut)
{
    switch (internalFormat)
    {
        case GL_LUMINANCE8_OES:
            swizzleStateOut->swizzleRed   = swizzleState.swizzleRed;
            swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
            swizzleStateOut->swizzleBlue  = swizzleState.swizzleRed;
            swizzleStateOut->swizzleAlpha = GL_ONE;
            break;
        case GL_LUMINANCE8_ALPHA8_OES:
            swizzleStateOut->swizzleRed   = swizzleState.swizzleRed;
            swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
            swizzleStateOut->swizzleBlue  = swizzleState.swizzleRed;
            swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
            break;
        case GL_ALPHA8_OES:
            swizzleStateOut->swizzleRed   = GL_ZERO;
            swizzleStateOut->swizzleGreen = GL_ZERO;
            swizzleStateOut->swizzleBlue  = GL_ZERO;
            swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
            break;
        case GL_RGB8:
            swizzleStateOut->swizzleRed   = swizzleState.swizzleRed;
            swizzleStateOut->swizzleGreen = swizzleState.swizzleGreen;
            swizzleStateOut->swizzleBlue  = swizzleState.swizzleBlue;
            swizzleStateOut->swizzleAlpha = GL_ONE;
            break;
        default:
            *swizzleStateOut = swizzleState;
            break;
    }
}

constexpr VkBufferUsageFlags kStagingBufferFlags =
    (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
constexpr size_t kStagingBufferSize = 1024 * 16;

constexpr VkFormatFeatureFlags kBlitFeatureFlags =
    VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
}  // anonymous namespace

// PixelBuffer implementation.
PixelBuffer::PixelBuffer(RendererVk *renderer)
    : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4, renderer);
}

PixelBuffer::~PixelBuffer()
{
}

void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}

void PixelBuffer::removeStagedUpdates(const gl::ImageIndex &index)
{
    // Find any staged updates for this index and remove them from the pending list.
    uint32_t levelIndex = static_cast<uint32_t>(index.getLevelIndex());
    uint32_t layerIndex = static_cast<uint32_t>(index.getLayerIndex());
    auto removeIfStatement = [levelIndex, layerIndex](SubresourceUpdate &update) {
        return update.copyRegion.imageSubresource.mipLevel == levelIndex &&
               update.copyRegion.imageSubresource.baseArrayLayer == layerIndex;
    };
    mSubresourceUpdates.erase(
        std::remove_if(mSubresourceUpdates.begin(), mSubresourceUpdates.end(), removeIfStatement),
        mSubresourceUpdates.end());
}

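// Staged update flow: incoming texture data is not written to the VkImage right away. It is
// first copied into the host-visible staging buffer (created with both TRANSFER_SRC and
// TRANSFER_DST usage), and a VkBufferImageCopy describing the destination subresource is queued
// in mSubresourceUpdates. flushUpdatesToImage later replays those copies on the GPU.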
gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                              const gl::ImageIndex &index,
                                              const gl::Extents &extents,
                                              const gl::Offset &offset,
                                              const gl::InternalFormat &formatInfo,
                                              const gl::PixelUnpackState &unpack,
                                              GLenum type,
                                              const uint8_t *pixels)
{
    GLuint inputRowPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
        inputRowPitch);

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
        inputDepthPitch);

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeSkipBytes(type, inputRowPitch, inputDepthPitch, unpack, applySkipImages),
        inputSkipBytes);

    RendererVk *renderer = contextVk->getRenderer();

    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    size_t outputRowPitch   = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;
    size_t allocationSize   = outputDepthPitch * extents.depth;
    ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    const uint8_t *source = pixels + inputSkipBytes;

    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    VkBufferImageCopy copy;

    copy.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}

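// Used by the CopyTex[Sub]Image paths: reads the source region back from the framebuffer on the
// CPU and stages it exactly like a TexSubImage update.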
gl::Error PixelBuffer::stageSubresourceUpdateFromFramebuffer(const gl::Context *context,
                                                             const gl::ImageIndex &index,
                                                             const gl::Rectangle &sourceArea,
                                                             const gl::Offset &dstOffset,
                                                             const gl::Extents &dstExtent,
                                                             const gl::InternalFormat &formatInfo,
                                                             FramebufferVk *framebufferVk)
{
    // If the extents and offset are outside the source image, we need to clip.
    gl::Rectangle clippedRectangle;
    const gl::Extents readExtents = framebufferVk->getReadImageExtents();
    if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
                       &clippedRectangle))
    {
        // Empty source area, nothing to do.
        return gl::NoError();
    }

    // 1- obtain a buffer handle to copy to
    RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();

    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();
    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);

    size_t outputRowPitch   = storageFormat.pixelBytes * clippedRectangle.width;
    size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;

    // The destination is only one layer deep.
    size_t allocationSize = outputDepthPitch;
    ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    PackPixelsParams params;
    params.area        = sourceArea;
    params.format      = formatInfo.internalFormat;
    params.type        = formatInfo.type;
    params.outputPitch = static_cast<GLuint>(outputRowPitch);
    params.packBuffer  = nullptr;
    params.pack        = gl::PixelPackState();

    // 2- copy the source image region to the pixel buffer using a CPU readback
    if (loadFunction.requiresConversion)
    {
        // When a conversion is required, we need to use the loadFunction to read from a temporary
        // buffer instead, so it's an even slower path.
        size_t bufferSize = storageFormat.pixelBytes * sourceArea.width * sourceArea.height;
        angle::MemoryBuffer *memoryBuffer = nullptr;
        ANGLE_TRY(context->getScratchBuffer(bufferSize, &memoryBuffer));

        // Read into the scratch buffer.
        ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, memoryBuffer->data()));

        // Load from the scratch buffer into our pixel buffer.
        loadFunction.loadFunction(sourceArea.width, sourceArea.height, 1, memoryBuffer->data(),
                                  outputRowPitch, 0, stagingPointer, outputRowPitch, 0);
    }
    else
    {
        // We read directly from the framebuffer into our pixel buffer.
        ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, stagingPointer));
    }

    // 3- enqueue the destination image subresource update
    VkBufferImageCopy copyToImage;
    copyToImage.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copyToImage.bufferRowLength                 = 0;  // Tightly packed data can be specified as 0.
    copyToImage.bufferImageHeight               = clippedRectangle.height;
    copyToImage.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copyToImage.imageSubresource.mipLevel       = index.getLevelIndex();
    copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copyToImage.imageSubresource.layerCount     = index.getLayerCount();
    gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
    gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
    return gl::NoError();
}

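// Thin wrapper over the staging buffer's allocator. Exposed so TextureVk can reserve staging
// memory directly, e.g. as a readback target for CPU mipmap generation.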
gl::Error PixelBuffer::allocate(RendererVk *renderer,
                                size_t sizeInBytes,
                                uint8_t **ptrOut,
                                VkBuffer *handleOut,
                                uint32_t *offsetOut,
                                bool *newBufferAllocatedOut)
{
    return mStagingBuffer.allocate(renderer, sizeInBytes, ptrOut, handleOut, offsetOut,
                                   newBufferAllocatedOut);
}

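// Replays all staged updates into the image with vkCmdCopyBufferToImage. Updates that target a
// mip level the image does not have (possible when the texture was redefined before the image
// was ever flushed) are kept in the queue instead of being applied.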
vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
                                           uint32_t levelCount,
                                           vk::ImageHelper *image,
                                           vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    std::vector<SubresourceUpdate> updatesToKeep;

    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);

        const uint32_t updateMipLevel = update.copyRegion.imageSubresource.mipLevel;
        // It's possible we've accumulated updates that are no longer applicable if the image has
        // never been flushed but the image description has changed. Check if this level exists
        // for this image.
        if (updateMipLevel >= levelCount)
        {
            updatesToKeep.emplace_back(update);
            continue;
        }

        // Conservatively flush all writes to the image. We could use a more restricted barrier.
        // Do not move this above the for loop; otherwise multiple updates can have race
        // conditions and not be applied correctly, as seen in the
        // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows
        // AMD.
        image->changeLayoutWithStages(
            VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    // Only remove the updates that were actually applied to the image.
    mSubresourceUpdates = std::move(updatesToKeep);

    if (mSubresourceUpdates.empty())
    {
        mStagingBuffer.releaseRetainedBuffers(renderer);
    }
    else
    {
        WARN() << "Internal Vulkan buffer could not be released. This is likely due to having "
                  "extra images defined in the Texture.";
    }

    return vk::NoError();
}

bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}

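// Like stageSubresourceUpdate, but instead of copying caller-provided pixels, reserves staging
// memory and hands the pointer back so the caller can write the data in place.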
gl::Error PixelBuffer::stageSubresourceUpdateAndGetData(RendererVk *renderer,
                                                        size_t allocationSize,
                                                        const gl::ImageIndex &imageIndex,
                                                        const gl::Extents &extents,
                                                        const gl::Offset &offset,
                                                        uint8_t **destData)
{
    VkBuffer bufferHandle;
    uint32_t stagingOffset  = 0;
    bool newBufferAllocated = false;
    ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, destData, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    VkBufferImageCopy copy;
    copy.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = imageIndex.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = imageIndex.hasLayer() ? imageIndex.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = imageIndex.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}

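// Walks the mip chain from firstMipLevel to maxMipLevel, generating each level from the previous
// one on the CPU and staging the result. mipGenerationFunction comes from
// image_util/generatemip.inl, which appears to downsample by averaging neighboring texels.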
gl::Error TextureVk::generateMipmapLevelsWithCPU(ContextVk *contextVk,
                                                 const angle::Format &sourceFormat,
                                                 GLuint layer,
                                                 GLuint firstMipLevel,
                                                 GLuint maxMipLevel,
                                                 const size_t sourceWidth,
                                                 const size_t sourceHeight,
                                                 const size_t sourceRowPitch,
                                                 uint8_t *sourceData)
{
    RendererVk *renderer = contextVk->getRenderer();

    size_t previousLevelWidth    = sourceWidth;
    size_t previousLevelHeight   = sourceHeight;
    uint8_t *previousLevelData   = sourceData;
    size_t previousLevelRowPitch = sourceRowPitch;

    for (GLuint currentMipLevel = firstMipLevel; currentMipLevel <= maxMipLevel; currentMipLevel++)
    {
        // Compute next level width and height.
        size_t mipWidth  = std::max<size_t>(1, previousLevelWidth >> 1);
        size_t mipHeight = std::max<size_t>(1, previousLevelHeight >> 1);

        // With the width and height of the next mip, we can allocate the next buffer we need.
        uint8_t *destData   = nullptr;
        size_t destRowPitch = mipWidth * sourceFormat.pixelBytes;

        size_t mipAllocationSize = destRowPitch * mipHeight;
        gl::Extents mipLevelExtents(static_cast<int>(mipWidth), static_cast<int>(mipHeight), 1);

        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
            renderer, mipAllocationSize,
            gl::ImageIndex::MakeFromType(mState.getType(), currentMipLevel, layer), mipLevelExtents,
            gl::Offset(), &destData));

        // Generate the mipmap into that new buffer.
        sourceFormat.mipGenerationFunction(previousLevelWidth, previousLevelHeight, 1,
                                           previousLevelData, previousLevelRowPitch, 0, destData,
                                           destRowPitch, 0);

        // Swap for the next iteration.
        previousLevelWidth    = mipWidth;
        previousLevelHeight   = mipHeight;
        previousLevelData     = destData;
        previousLevelRowPitch = destRowPitch;
    }

    return gl::NoError();
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;

// TextureVk implementation.
TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
    : TextureImpl(state), mRenderTarget(&mImage, &mBaseLevelImageView, this), mPixelBuffer(renderer)
{
}

TextureVk::~TextureVk()
{
}

gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mSampler);

    mPixelBuffer.release(renderer);
    return gl::NoError();
}

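// Note that setImage only releases the old image and stages the new data: the VkImage itself is
// created lazily (see ensureImageInitialized), so back-to-back redefinitions never allocate
// intermediate images.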
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // If there are any staged changes for this index, we can remove them since we're going to
    // override them with this call.
    mPixelBuffer.removeStagedUpdates(index);

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);

        // Calculate the expected size for the index we are defining. If the size is different
        // from the given size, or the format is different, we are redefining the image so we
        // must release it.
        if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    onResourceChanged(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}

gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk                 = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    onResourceChanged(contextVk->getRenderer());

    return gl::NoError();
}

gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
    const gl::InternalFormat &internalFormatInfo =
        gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
    ANGLE_TRY(setImage(context, index, internalFormat, newImageSize, internalFormatInfo.format,
                       internalFormatInfo.type, gl::PixelUnpackState(), nullptr));
    return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
                            source);
}

gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
    return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
}

gl::Error TextureVk::copySubImageImpl(const gl::Context *context,
                                      const gl::ImageIndex &index,
                                      const gl::Offset &destOffset,
                                      const gl::Rectangle &sourceArea,
                                      const gl::InternalFormat &internalFormat,
                                      gl::Framebuffer *source)
{
    gl::Extents fbSize = source->getReadColorbuffer()->getSize();
    gl::Rectangle clippedSourceArea;
    if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
                       &clippedSourceArea))
    {
        return gl::NoError();
    }

    // Shift the destination offset by however much the source area was clipped.
    const gl::Offset modifiedDestOffset(destOffset.x + clippedSourceArea.x - sourceArea.x,
                                        destOffset.y + clippedSourceArea.y - sourceArea.y, 0);

    ContextVk *contextVk         = vk::GetImpl(context);
    RendererVk *renderer         = contextVk->getRenderer();
    FramebufferVk *framebufferVk = vk::GetImpl(source);

    // For now, favor conformance. We do a CPU readback that does the conversion, and then stage
    // the change to the pixel buffer.
    // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
    // when it's supported.
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
        context, index, clippedSourceArea, modifiedDestOffset,
        gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
        framebufferVk));

    onResourceChanged(renderer);
    framebufferVk->addReadDependency(this);
    return gl::NoError();
}

vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
                                              vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(appendWriteResource(renderer, commandBufferOut));
    return vk::NoError();
}

gl::Error TextureVk::setStorage(const gl::Context *context,
                                gl::TextureType type,
                                size_t levels,
                                GLenum internalFormat,
                                const gl::Extents &size)
{
    ContextVk *contextVk             = GetAs<ContextVk>(context->getImplementation());
    RendererVk *renderer             = contextVk->getRenderer();
    const vk::Format &format         = renderer->getFormat(internalFormat);
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
    ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
    return gl::NoError();
}

gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

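// GPU mipmap generation: blit each level from the one above it, transitioning level N-1 to
// TRANSFER_SRC before the blit. All array layers are processed in a single barrier/blit pair
// per level, and the whole image ends up in TRANSFER_SRC_OPTIMAL when done.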
void TextureVk::generateMipmapWithBlit(RendererVk *renderer)
{
    uint32_t imageLayerCount           = GetImageLayerCount(mState.getType());
    const gl::Extents baseLevelExtents = mImage.getExtents();
    vk::CommandBuffer *commandBuffer   = nullptr;
    getCommandBufferForWrite(renderer, &commandBuffer);

    // We are able to use blitImage since the image format we are using supports it. This
    // is a faster way to generate the mips.
    int32_t mipWidth  = baseLevelExtents.width;
    int32_t mipHeight = baseLevelExtents.height;

    // Manually manage the image memory barrier because it uses a lot more parameters than our
    // usual one.
    VkImageMemoryBarrier barrier;
    barrier.sType                           = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.image                           = mImage.getImage().getHandle();
    barrier.srcQueueFamilyIndex             = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex             = VK_QUEUE_FAMILY_IGNORED;
    barrier.pNext                           = nullptr;
    barrier.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount     = imageLayerCount;
    barrier.subresourceRange.levelCount     = 1;

    for (uint32_t mipLevel = 1; mipLevel <= mState.getMipmapMaxLevel(); mipLevel++)
    {
        int32_t nextMipWidth  = std::max<int32_t>(1, mipWidth >> 1);
        int32_t nextMipHeight = std::max<int32_t>(1, mipHeight >> 1);

        barrier.subresourceRange.baseMipLevel = mipLevel - 1;
        barrier.oldLayout                     = mImage.getCurrentLayout();
        barrier.newLayout                     = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        barrier.srcAccessMask                 = VK_ACCESS_TRANSFER_WRITE_BIT;
        barrier.dstAccessMask                 = VK_ACCESS_TRANSFER_READ_BIT;

        // We can do it for all layers at once.
        commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                          VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

        VkImageBlit blit                   = {};
        blit.srcOffsets[0]                 = {0, 0, 0};
        blit.srcOffsets[1]                 = {mipWidth, mipHeight, 1};
        blit.srcSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.srcSubresource.mipLevel       = mipLevel - 1;
        blit.srcSubresource.baseArrayLayer = 0;
        blit.srcSubresource.layerCount     = imageLayerCount;
        blit.dstOffsets[0]                 = {0, 0, 0};
        blit.dstOffsets[1]                 = {nextMipWidth, nextMipHeight, 1};
        blit.dstSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.dstSubresource.mipLevel       = mipLevel;
        blit.dstSubresource.baseArrayLayer = 0;
        blit.dstSubresource.layerCount     = imageLayerCount;

        mipWidth  = nextMipWidth;
        mipHeight = nextMipHeight;

        commandBuffer->blitImage(mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit,
                                 VK_FILTER_LINEAR);
    }

    // Transition the last mip level to the same layout as all the other ones, so we can declare
    // our whole image layout to be SRC_OPTIMAL.
    barrier.subresourceRange.baseMipLevel = mState.getMipmapMaxLevel();
    barrier.oldLayout                     = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.newLayout                     = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

    // We can do it for all layers at once.
    commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                      VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

    // This is just changing the internal state of the image helper so that the next call
    // to changeLayoutWithStages will use this layout as the "oldLayout" argument.
    mImage.updateLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
}

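// CPU fallback for formats whose VkFormat does not support blitting: copy the base level of
// every layer into the staging buffer, wait for the GPU (renderer->finish), generate the mip
// chain on the CPU, and stage the resulting levels back as regular subresource updates.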
gl::Error TextureVk::generateMipmapWithCPU(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    bool newBufferAllocated            = false;
    const gl::Extents baseLevelExtents = mImage.getExtents();
    uint32_t imageLayerCount           = GetImageLayerCount(mState.getType());
    const angle::Format &angleFormat   = mImage.getFormat().textureFormat();
    GLuint sourceRowPitch              = baseLevelExtents.width * angleFormat.pixelBytes;
    size_t baseLevelAllocationSize     = sourceRowPitch * baseLevelExtents.height;

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    // copyImageToBuffer requires the source image to be in SRC_OPTIMAL layout.
    mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                  VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                  VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);

    size_t totalAllocationSize = baseLevelAllocationSize * imageLayerCount;

    VkBuffer copyBufferHandle;
    uint8_t *baseLevelBuffers;
    uint32_t copyBaseOffset;

    // Allocate enough memory to copy every level 0 image (one for each layer of the texture).
    ANGLE_TRY(mPixelBuffer.allocate(renderer, totalAllocationSize, &baseLevelBuffers,
                                    &copyBufferHandle, &copyBaseOffset, &newBufferAllocated));

    // Do only one copy for all layers at once.
    VkBufferImageCopy region;
    region.bufferImageHeight               = baseLevelExtents.height;
    region.bufferOffset                    = static_cast<VkDeviceSize>(copyBaseOffset);
    region.bufferRowLength                 = baseLevelExtents.width;
    region.imageExtent.width               = baseLevelExtents.width;
    region.imageExtent.height              = baseLevelExtents.height;
    region.imageExtent.depth               = 1;
    region.imageOffset.x                   = 0;
    region.imageOffset.y                   = 0;
    region.imageOffset.z                   = 0;
    region.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    region.imageSubresource.baseArrayLayer = 0;
    region.imageSubresource.layerCount     = imageLayerCount;
    region.imageSubresource.mipLevel       = mState.getEffectiveBaseLevel();

    commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
                                     1, &region);

    // Wait for the copy to finish so the base level data can be read on the CPU.
    ANGLE_TRY(renderer->finish(context));

    const uint32_t levelCount = getLevelCount();

    // We now have the base level available to be manipulated through the baseLevelBuffers
    // pointer. Generate all the missing mipmaps with the slow path. We can optimize with
    // vkCmdBlitImage later.
    // For each layer, use the copied data to generate all the mips.
    for (GLuint layer = 0; layer < imageLayerCount; layer++)
    {
        size_t bufferOffset = layer * baseLevelAllocationSize;

        ANGLE_TRY(generateMipmapLevelsWithCPU(
            contextVk, angleFormat, layer, mState.getEffectiveBaseLevel() + 1,
            mState.getMipmapMaxLevel(), baseLevelExtents.width, baseLevelExtents.height,
            sourceRowPitch, baseLevelBuffers + bufferOffset));
    }

    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, levelCount, &mImage, commandBuffer));
    return gl::NoError();
}

gl::Error TextureVk::generateMipmap(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Some data is pending, or the image has not been defined at all yet.
    if (!mImage.valid())
    {
        // Initialize the image so we can generate the next levels.
        if (!mPixelBuffer.empty())
        {
            ANGLE_TRY(ensureImageInitialized(renderer));
            ASSERT(mImage.valid());
        }
        else
        {
            // There is nothing to generate if there is nothing uploaded so far.
            return gl::NoError();
        }
    }

    VkFormatProperties imageProperties;
    vk::GetFormatProperties(renderer->getPhysicalDevice(), mImage.getFormat().vkTextureFormat,
                            &imageProperties);

    // Check if the image supports blit. If it does, we can do the mipmap generation on the GPU
    // only.
    if (IsMaskFlagSet(kBlitFeatureFlags, imageProperties.linearTilingFeatures))
    {
        generateMipmapWithBlit(renderer);
    }
    else
    {
        ANGLE_TRY(generateMipmapWithCPU(context));
    }

    // We're changing this texture's content; make sure we let the graph know.
    onResourceChanged(renderer);

    return gl::NoError();
}

gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}

gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}

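// Lazily creates the VkImage (sized from the base level and the texture's mip range) and then
// flushes any staged pixel updates into it. Called before the image is used, e.g. as a
// framebuffer attachment.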
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    if (mImage.valid() && mPixelBuffer.empty())
    {
        return vk::NoError();
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    const gl::ImageDesc &baseLevelDesc  = mState.getBaseLevelDesc();
    const gl::Extents &baseLevelExtents = baseLevelDesc.size;
    const uint32_t levelCount           = getLevelCount();

    if (!mImage.valid())
    {
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);

        ANGLE_TRY(initImage(renderer, format, baseLevelExtents, levelCount, commandBuffer));
    }

    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, levelCount, &mImage, commandBuffer));
    return vk::NoError();
}

gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    if (dirtyBits.none() && mSampler.valid())
    {
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        RendererVk *renderer = contextVk->getRenderer();
        renderer->releaseObject(getStoredQueueSerial(), &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext                   = nullptr;
    samplerInfo.flags                   = 0;
    samplerInfo.magFilter               = gl_vk::GetFilter(samplerState.magFilter);
    samplerInfo.minFilter               = gl_vk::GetFilter(samplerState.minFilter);
    samplerInfo.mipmapMode              = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
    samplerInfo.addressModeU            = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
    samplerInfo.addressModeV            = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
    samplerInfo.addressModeW            = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
    samplerInfo.mipLodBias              = 0.0f;
    samplerInfo.anisotropyEnable        = VK_FALSE;
    samplerInfo.maxAnisotropy           = 1.0f;
    samplerInfo.compareEnable           = VK_FALSE;
    samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod                  = samplerState.minLod;
    samplerInfo.maxLod                  = samplerState.maxLod;
    samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
    return gl::NoError();
}

gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}

gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}

const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}

const vk::ImageView &TextureVk::getImageView() const
{
    ASSERT(mImage.valid());

    // Non-mipmapped filter modes sample through the single-level view; everything else uses the
    // view covering the full mip chain.
    const GLenum minFilter = mState.getSamplerState().minFilter;
    if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
    {
        return mBaseLevelImageView;
    }

    return mMipmapImageView;
}

const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}

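// Creates the VkImage with a full usage mask (sampled, color attachment, transfer src/dst),
// binds device-local memory, builds both image views, and clears the whole image to opaque
// black as a placeholder until real data is uploaded.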
vk::Error TextureVk::initImage(RendererVk *renderer,
                               const vk::Format &format,
                               const gl::Extents &extents,
                               const uint32_t levelCount,
                               vk::CommandBuffer *commandBuffer)
{
    const VkDevice device = renderer->getDevice();

    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    VkClearColorValue black = {{0, 0, 0, 1.0f}};
    mImage.clearColor(black, 0, levelCount, commandBuffer);
    return vk::NoError();
}

void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mBaseLevelImageView);
    renderer->releaseObject(getStoredQueueSerial(), &mMipmapImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}

uint32_t TextureVk::getLevelCount() const
{
    ASSERT(mState.getEffectiveBaseLevel() == 0);

    // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the level count is always
    // maxLevel + 1.
    return mState.getMipmapMaxLevel() + 1;
}
}  // namespace rx