blob: 81107610ab4c8c49d7531ae4026392a211feafc7 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Luc Ferronc5181702018-05-17 09:44:42 -040013#include "image_util/generatemip.inl"
Jamie Madill035fd6b2017-10-03 15:43:22 -040014#include "libANGLE/Context.h"
15#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040016#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040017#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050018#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
Luc Ferron5164b792018-03-06 09:10:12 -050022namespace
23{
Jamie Madill93edca12018-03-30 10:43:18 -040024void MapSwizzleState(GLenum internalFormat,
25 const gl::SwizzleState &swizzleState,
26 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050027{
28 switch (internalFormat)
29 {
Jamie Madill26084d02018-04-09 13:44:04 -040030 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040031 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
34 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050035 break;
Jamie Madill26084d02018-04-09 13:44:04 -040036 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040037 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
40 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050041 break;
Jamie Madill26084d02018-04-09 13:44:04 -040042 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040043 swizzleStateOut->swizzleRed = GL_ZERO;
44 swizzleStateOut->swizzleGreen = GL_ZERO;
45 swizzleStateOut->swizzleBlue = GL_ZERO;
46 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040047 break;
Luc Ferron5164b792018-03-06 09:10:12 -050048 default:
Jamie Madill93edca12018-03-30 10:43:18 -040049 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050050 break;
51 }
52}
Jamie Madill26084d02018-04-09 13:44:04 -040053
54constexpr VkBufferUsageFlags kStagingBufferFlags =
55 (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
56constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron05cd6df2018-05-24 15:51:29 -040057
58constexpr VkFormatFeatureFlags kBlitFeatureFlags =
59 VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
Luc Ferron5164b792018-03-06 09:10:12 -050060} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040061
Jamie Madill26084d02018-04-09 13:44:04 -040062// StagingStorage implementation.
Luc Ferrona9ab0f32018-05-17 17:03:55 -040063PixelBuffer::PixelBuffer(RendererVk *renderer)
64 : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
Jamie Madill26084d02018-04-09 13:44:04 -040065{
Jamie Madill20fa8d52018-04-15 10:09:32 -040066 // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
67 // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
Luc Ferrona9ab0f32018-05-17 17:03:55 -040068 mStagingBuffer.init(4, renderer);
Jamie Madill26084d02018-04-09 13:44:04 -040069}
70
Jamie Madilla7be1f72018-04-13 15:16:26 -040071PixelBuffer::~PixelBuffer()
Jamie Madill26084d02018-04-09 13:44:04 -040072{
73}
74
Jamie Madilla7be1f72018-04-13 15:16:26 -040075void PixelBuffer::release(RendererVk *renderer)
Jamie Madill26084d02018-04-09 13:44:04 -040076{
77 mStagingBuffer.release(renderer);
78}
79
Luc Ferron2f3f4142018-05-30 08:27:19 -040080void PixelBuffer::removeStagedUpdates(const gl::ImageIndex &index)
81{
82 // Find any staged updates for this index and removes them from the pending list.
83 uint32_t levelIndex = static_cast<uint32_t>(index.getLevelIndex());
84 uint32_t layerIndex = static_cast<uint32_t>(index.getLayerIndex());
85 auto removeIfStatement = [levelIndex, layerIndex](SubresourceUpdate &update) {
86 return update.copyRegion.imageSubresource.mipLevel == levelIndex &&
87 update.copyRegion.imageSubresource.baseArrayLayer == layerIndex;
88 };
89 mSubresourceUpdates.erase(
90 std::remove_if(mSubresourceUpdates.begin(), mSubresourceUpdates.end(), removeIfStatement),
91 mSubresourceUpdates.end());
92}
93
Jamie Madilla7be1f72018-04-13 15:16:26 -040094gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
95 const gl::ImageIndex &index,
96 const gl::Extents &extents,
Luc Ferron33e05ba2018-04-23 15:12:34 -040097 const gl::Offset &offset,
Jamie Madilla7be1f72018-04-13 15:16:26 -040098 const gl::InternalFormat &formatInfo,
99 const gl::PixelUnpackState &unpack,
100 GLenum type,
101 const uint8_t *pixels)
Jamie Madill26084d02018-04-09 13:44:04 -0400102{
103 GLuint inputRowPitch = 0;
104 ANGLE_TRY_RESULT(
105 formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
106 inputRowPitch);
107
108 GLuint inputDepthPitch = 0;
109 ANGLE_TRY_RESULT(
110 formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
111 inputDepthPitch);
112
113 // TODO(jmadill): skip images for 3D Textures.
114 bool applySkipImages = false;
115
116 GLuint inputSkipBytes = 0;
117 ANGLE_TRY_RESULT(
Jeff Gilbert31d3deb2018-05-18 18:32:16 -0700118 formatInfo.computeSkipBytes(type, inputRowPitch, inputDepthPitch, unpack, applySkipImages),
Jamie Madill26084d02018-04-09 13:44:04 -0400119 inputSkipBytes);
120
121 RendererVk *renderer = contextVk->getRenderer();
122
123 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
124 const angle::Format &storageFormat = vkFormat.textureFormat();
125
126 size_t outputRowPitch = storageFormat.pixelBytes * extents.width;
127 size_t outputDepthPitch = outputRowPitch * extents.height;
128
Jamie Madill20fa8d52018-04-15 10:09:32 -0400129 VkBuffer bufferHandle = VK_NULL_HANDLE;
130
Jamie Madill26084d02018-04-09 13:44:04 -0400131 uint8_t *stagingPointer = nullptr;
132 bool newBufferAllocated = false;
133 uint32_t stagingOffset = 0;
134 size_t allocationSize = outputDepthPitch * extents.depth;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400135 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
Jamie Madill26084d02018-04-09 13:44:04 -0400136 &stagingOffset, &newBufferAllocated);
137
138 const uint8_t *source = pixels + inputSkipBytes;
139
140 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);
141
142 loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
143 inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);
144
Jamie Madill20fa8d52018-04-15 10:09:32 -0400145 VkBufferImageCopy copy;
Jamie Madill26084d02018-04-09 13:44:04 -0400146
Jamie Madill20fa8d52018-04-15 10:09:32 -0400147 copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
148 copy.bufferRowLength = extents.width;
149 copy.bufferImageHeight = extents.height;
150 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
151 copy.imageSubresource.mipLevel = index.getLevelIndex();
152 copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
153 copy.imageSubresource.layerCount = index.getLayerCount();
154
Luc Ferron33e05ba2018-04-23 15:12:34 -0400155 gl_vk::GetOffset(offset, &copy.imageOffset);
Jamie Madill20fa8d52018-04-15 10:09:32 -0400156 gl_vk::GetExtent(extents, &copy.imageExtent);
157
158 mSubresourceUpdates.emplace_back(bufferHandle, copy);
Jamie Madill26084d02018-04-09 13:44:04 -0400159
160 return gl::NoError();
161}
162
Jamie Madill58675012018-05-22 14:54:07 -0400163gl::Error PixelBuffer::stageSubresourceUpdateFromFramebuffer(const gl::Context *context,
164 const gl::ImageIndex &index,
165 const gl::Rectangle &sourceArea,
166 const gl::Offset &dstOffset,
167 const gl::Extents &dstExtent,
168 const gl::InternalFormat &formatInfo,
169 FramebufferVk *framebufferVk)
Luc Ferron2a849bf2018-05-10 13:19:11 -0400170{
171 // If the extents and offset is outside the source image, we need to clip.
172 gl::Rectangle clippedRectangle;
Jamie Madill58675012018-05-22 14:54:07 -0400173 const gl::Extents readExtents = framebufferVk->getReadImageExtents();
174 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
Luc Ferron2a849bf2018-05-10 13:19:11 -0400175 &clippedRectangle))
176 {
177 // Empty source area, nothing to do.
178 return gl::NoError();
179 }
180
181 // 1- obtain a buffer handle to copy to
Luc Ferron018709f2018-05-10 13:53:11 -0400182 RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400183
184 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
185 const angle::Format &storageFormat = vkFormat.textureFormat();
Luc Ferron018709f2018-05-10 13:53:11 -0400186 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);
Luc Ferron2a849bf2018-05-10 13:19:11 -0400187
188 size_t outputRowPitch = storageFormat.pixelBytes * clippedRectangle.width;
189 size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;
190
191 VkBuffer bufferHandle = VK_NULL_HANDLE;
192
193 uint8_t *stagingPointer = nullptr;
194 bool newBufferAllocated = false;
195 uint32_t stagingOffset = 0;
Luc Ferron018709f2018-05-10 13:53:11 -0400196
197 // The destination is only one layer deep.
198 size_t allocationSize = outputDepthPitch;
Luc Ferron2a849bf2018-05-10 13:19:11 -0400199 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
200 &stagingOffset, &newBufferAllocated);
201
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400202 PackPixelsParams params;
203 params.area = sourceArea;
204 params.format = formatInfo.internalFormat;
205 params.type = formatInfo.type;
206 params.outputPitch = static_cast<GLuint>(outputRowPitch);
207 params.packBuffer = nullptr;
208 params.pack = gl::PixelPackState();
209
Luc Ferron018709f2018-05-10 13:53:11 -0400210 // 2- copy the source image region to the pixel buffer using a cpu readback
211 if (loadFunction.requiresConversion)
212 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400213 // When a conversion is required, we need to use the loadFunction to read from a temporary
214 // buffer instead so its an even slower path.
215 size_t bufferSize = storageFormat.pixelBytes * sourceArea.width * sourceArea.height;
216 angle::MemoryBuffer *memoryBuffer = nullptr;
217 ANGLE_TRY(context->getScratchBuffer(bufferSize, &memoryBuffer));
218
219 // Read into the scratch buffer
Jamie Madill58675012018-05-22 14:54:07 -0400220 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, memoryBuffer->data()));
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400221
222 // Load from scratch buffer to our pixel buffer
223 loadFunction.loadFunction(sourceArea.width, sourceArea.height, 1, memoryBuffer->data(),
224 outputRowPitch, 0, stagingPointer, outputRowPitch, 0);
Luc Ferron018709f2018-05-10 13:53:11 -0400225 }
226 else
227 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400228 // We read directly from the framebuffer into our pixel buffer.
Jamie Madill58675012018-05-22 14:54:07 -0400229 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, stagingPointer));
Luc Ferron018709f2018-05-10 13:53:11 -0400230 }
Luc Ferron2a849bf2018-05-10 13:19:11 -0400231
Luc Ferron018709f2018-05-10 13:53:11 -0400232 // 3- enqueue the destination image subresource update
Luc Ferron2a849bf2018-05-10 13:19:11 -0400233 VkBufferImageCopy copyToImage;
234 copyToImage.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
Luc Ferron018709f2018-05-10 13:53:11 -0400235 copyToImage.bufferRowLength = 0; // Tightly packed data can be specified as 0.
Luc Ferron2a849bf2018-05-10 13:19:11 -0400236 copyToImage.bufferImageHeight = clippedRectangle.height;
237 copyToImage.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
238 copyToImage.imageSubresource.mipLevel = index.getLevelIndex();
239 copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
240 copyToImage.imageSubresource.layerCount = index.getLayerCount();
241 gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
242 gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);
243
244 // 3- enqueue the destination image subresource update
245 mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
246 return gl::NoError();
247}
248
Luc Ferronc5181702018-05-17 09:44:42 -0400249gl::Error PixelBuffer::allocate(RendererVk *renderer,
250 size_t sizeInBytes,
251 uint8_t **ptrOut,
252 VkBuffer *handleOut,
253 uint32_t *offsetOut,
254 bool *newBufferAllocatedOut)
255{
256 return mStagingBuffer.allocate(renderer, sizeInBytes, ptrOut, handleOut, offsetOut,
257 newBufferAllocatedOut);
258}
259
Jamie Madilla7be1f72018-04-13 15:16:26 -0400260vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
Luc Ferron2f3f4142018-05-30 08:27:19 -0400261 uint32_t levelCount,
Jamie Madilla7be1f72018-04-13 15:16:26 -0400262 vk::ImageHelper *image,
263 vk::CommandBuffer *commandBuffer)
Jamie Madill26084d02018-04-09 13:44:04 -0400264{
Jamie Madill20fa8d52018-04-15 10:09:32 -0400265 if (mSubresourceUpdates.empty())
Jamie Madill26084d02018-04-09 13:44:04 -0400266 {
Jamie Madill20fa8d52018-04-15 10:09:32 -0400267 return vk::NoError();
Jamie Madill26084d02018-04-09 13:44:04 -0400268 }
269
Jamie Madill20fa8d52018-04-15 10:09:32 -0400270 ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));
271
Luc Ferron2f3f4142018-05-30 08:27:19 -0400272 std::vector<SubresourceUpdate> updatesToKeep;
273
Jamie Madill20fa8d52018-04-15 10:09:32 -0400274 for (const SubresourceUpdate &update : mSubresourceUpdates)
275 {
276 ASSERT(update.bufferHandle != VK_NULL_HANDLE);
Luc Ferron1a186b12018-04-24 15:25:35 -0400277
Luc Ferron2f3f4142018-05-30 08:27:19 -0400278 const uint32_t updateMipLevel = update.copyRegion.imageSubresource.mipLevel;
279 // It's possible we've accumulated updates that are no longer applicable if the image has
280 // never been flushed but the image description has changed. Check if this level exist for
281 // this image.
282 if (updateMipLevel >= levelCount)
283 {
284 updatesToKeep.emplace_back(update);
285 continue;
286 }
287
Luc Ferron1a186b12018-04-24 15:25:35 -0400288 // Conservatively flush all writes to the image. We could use a more restricted barrier.
289 // Do not move this above the for loop, otherwise multiple updates can have race conditions
290 // and not be applied correctly as seen i:
291 // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
292 image->changeLayoutWithStages(
293 VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
294 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);
295
Jamie Madill20fa8d52018-04-15 10:09:32 -0400296 commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
297 image->getCurrentLayout(), 1, &update.copyRegion);
298 }
299
Luc Ferron2f3f4142018-05-30 08:27:19 -0400300 // Only remove the updates that were actually applied to the image.
301 mSubresourceUpdates = std::move(updatesToKeep);
302
303 if (mSubresourceUpdates.empty())
304 {
305 mStagingBuffer.releaseRetainedBuffers(renderer);
306 }
307 else
308 {
309 WARN() << "Internal Vulkan bufffer could not be released. This is likely due to having "
310 "extra images defined in the Texture.";
311 }
Jamie Madill20fa8d52018-04-15 10:09:32 -0400312
Jamie Madill26084d02018-04-09 13:44:04 -0400313 return vk::NoError();
314}
315
Luc Ferron10434f62018-04-24 10:06:37 -0400316bool PixelBuffer::empty() const
317{
318 return mSubresourceUpdates.empty();
319}
320
Luc Ferronc5181702018-05-17 09:44:42 -0400321gl::Error PixelBuffer::stageSubresourceUpdateAndGetData(RendererVk *renderer,
322 size_t allocationSize,
323 const gl::ImageIndex &imageIndex,
324 const gl::Extents &extents,
325 const gl::Offset &offset,
326 uint8_t **destData)
327{
328 VkBuffer bufferHandle;
329 uint32_t stagingOffset = 0;
330 bool newBufferAllocated = false;
331 ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, destData, &bufferHandle,
332 &stagingOffset, &newBufferAllocated));
333
334 VkBufferImageCopy copy;
335 copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
336 copy.bufferRowLength = extents.width;
337 copy.bufferImageHeight = extents.height;
338 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
339 copy.imageSubresource.mipLevel = imageIndex.getLevelIndex();
340 copy.imageSubresource.baseArrayLayer = imageIndex.hasLayer() ? imageIndex.getLayerIndex() : 0;
341 copy.imageSubresource.layerCount = imageIndex.getLayerCount();
342
343 gl_vk::GetOffset(offset, &copy.imageOffset);
344 gl_vk::GetExtent(extents, &copy.imageExtent);
345
346 mSubresourceUpdates.emplace_back(bufferHandle, copy);
347
348 return gl::NoError();
349}
350
Luc Ferron05cd6df2018-05-24 15:51:29 -0400351gl::Error TextureVk::generateMipmapLevelsWithCPU(ContextVk *contextVk,
352 const angle::Format &sourceFormat,
353 GLuint layer,
354 GLuint firstMipLevel,
355 GLuint maxMipLevel,
356 const size_t sourceWidth,
357 const size_t sourceHeight,
358 const size_t sourceRowPitch,
359 uint8_t *sourceData)
Luc Ferronc5181702018-05-17 09:44:42 -0400360{
361 RendererVk *renderer = contextVk->getRenderer();
362
363 size_t previousLevelWidth = sourceWidth;
364 size_t previousLevelHeight = sourceHeight;
365 uint8_t *previousLevelData = sourceData;
366 size_t previousLevelRowPitch = sourceRowPitch;
367
368 for (GLuint currentMipLevel = firstMipLevel; currentMipLevel <= maxMipLevel; currentMipLevel++)
369 {
370 // Compute next level width and height.
371 size_t mipWidth = std::max<size_t>(1, previousLevelWidth >> 1);
372 size_t mipHeight = std::max<size_t>(1, previousLevelHeight >> 1);
373
374 // With the width and height of the next mip, we can allocate the next buffer we need.
375 uint8_t *destData = nullptr;
376 size_t destRowPitch = mipWidth * sourceFormat.pixelBytes;
377
378 size_t mipAllocationSize = destRowPitch * mipHeight;
379 gl::Extents mipLevelExtents(static_cast<int>(mipWidth), static_cast<int>(mipHeight), 1);
380
381 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
382 renderer, mipAllocationSize,
Luc Ferron22695bf2018-05-22 15:52:08 -0400383 gl::ImageIndex::MakeFromType(mState.getType(), currentMipLevel, layer), mipLevelExtents,
Luc Ferronc5181702018-05-17 09:44:42 -0400384 gl::Offset(), &destData));
385
386 // Generate the mipmap into that new buffer
387 sourceFormat.mipGenerationFunction(previousLevelWidth, previousLevelHeight, 1,
388 previousLevelData, previousLevelRowPitch, 0, destData,
389 destRowPitch, 0);
390
391 // Swap for the next iteration
392 previousLevelWidth = mipWidth;
393 previousLevelHeight = mipHeight;
394 previousLevelData = destData;
395 previousLevelRowPitch = destRowPitch;
396 }
397
398 return gl::NoError();
399}
400
Jamie Madilla7be1f72018-04-13 15:16:26 -0400401PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400402{
403}
404
Jamie Madilla7be1f72018-04-13 15:16:26 -0400405PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
406 const VkBufferImageCopy &copyRegionIn)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400407 : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
408{
409}
410
Jamie Madilla7be1f72018-04-13 15:16:26 -0400411PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400412
Jamie Madill26084d02018-04-09 13:44:04 -0400413// TextureVk implementation.
Luc Ferrona9ab0f32018-05-17 17:03:55 -0400414TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
Jamie Madillbcf467f2018-05-23 09:46:00 -0400415 : TextureImpl(state), mRenderTarget(&mImage, &mBaseLevelImageView, this), mPixelBuffer(renderer)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400416{
417}
418
419TextureVk::~TextureVk()
420{
421}
422
Jamie Madill035fd6b2017-10-03 15:43:22 -0400423gl::Error TextureVk::onDestroy(const gl::Context *context)
424{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400425 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400426 RendererVk *renderer = contextVk->getRenderer();
427
Jamie Madillc4f27e42018-03-31 14:19:18 -0400428 releaseImage(context, renderer);
Jamie Madillc57ee252018-05-30 19:53:48 -0400429 renderer->releaseObject(getStoredQueueSerial(), &mSampler);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400430
Jamie Madilla7be1f72018-04-13 15:16:26 -0400431 mPixelBuffer.release(renderer);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400432 return gl::NoError();
433}
434
Jamie Madillc564c072017-06-01 12:45:42 -0400435gl::Error TextureVk::setImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400436 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400437 GLenum internalFormat,
438 const gl::Extents &size,
439 GLenum format,
440 GLenum type,
441 const gl::PixelUnpackState &unpack,
442 const uint8_t *pixels)
443{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400444 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill1b038242017-11-01 15:14:36 -0400445 RendererVk *renderer = contextVk->getRenderer();
446
Luc Ferron2f3f4142018-05-30 08:27:19 -0400447 // If there is any staged changes for this index, we can remove them since we're going to
448 // override them with this call.
449 mPixelBuffer.removeStagedUpdates(index);
450
Jamie Madillc4f27e42018-03-31 14:19:18 -0400451 // Convert internalFormat to sized internal format.
452 const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400453
Jamie Madill1b038242017-11-01 15:14:36 -0400454 if (mImage.valid())
455 {
Jamie Madillc4f27e42018-03-31 14:19:18 -0400456 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
Luc Ferron90968362018-05-04 08:47:22 -0400457
458 // Calculate the expected size for the index we are defining. If the size is different from
459 // the given size, or the format is different, we are redefining the image so we must
460 // release it.
461 if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
Jamie Madill1b038242017-11-01 15:14:36 -0400462 {
Jamie Madillc4f27e42018-03-31 14:19:18 -0400463 releaseImage(context, renderer);
Jamie Madill1b038242017-11-01 15:14:36 -0400464 }
465 }
Jamie Madill035fd6b2017-10-03 15:43:22 -0400466
Geoff Langbd6ae4a2018-01-29 15:51:18 -0500467 // Early-out on empty textures, don't create a zero-sized storage.
Jamie Madill26084d02018-04-09 13:44:04 -0400468 if (size.empty())
Geoff Langbd6ae4a2018-01-29 15:51:18 -0500469 {
470 return gl::NoError();
471 }
472
Jamie Madill26084d02018-04-09 13:44:04 -0400473 // Create a new graph node to store image initialization commands.
Jamie Madill316c6062018-05-29 10:49:45 -0400474 onResourceChanged(renderer);
Jamie Madill26084d02018-04-09 13:44:04 -0400475
Jamie Madill035fd6b2017-10-03 15:43:22 -0400476 // Handle initial data.
Jamie Madill035fd6b2017-10-03 15:43:22 -0400477 if (pixels)
478 {
Luc Ferron33e05ba2018-04-23 15:12:34 -0400479 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
480 formatInfo, unpack, type, pixels));
Jamie Madill035fd6b2017-10-03 15:43:22 -0400481 }
482
483 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400484}
485
Jamie Madillc564c072017-06-01 12:45:42 -0400486gl::Error TextureVk::setSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400487 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400488 const gl::Box &area,
489 GLenum format,
490 GLenum type,
491 const gl::PixelUnpackState &unpack,
492 const uint8_t *pixels)
493{
Jamie Madill5b18f482017-11-30 17:24:22 -0500494 ContextVk *contextVk = vk::GetImpl(context);
495 const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
Luc Ferron33e05ba2018-04-23 15:12:34 -0400496 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
497 contextVk, index, gl::Extents(area.width, area.height, area.depth),
498 gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));
Jamie Madillb2214862018-04-26 07:25:48 -0400499
500 // Create a new graph node to store image initialization commands.
Jamie Madill316c6062018-05-29 10:49:45 -0400501 onResourceChanged(contextVk->getRenderer());
Jamie Madillb2214862018-04-26 07:25:48 -0400502
Jamie Madill5b18f482017-11-30 17:24:22 -0500503 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400504}
505
Jamie Madillc564c072017-06-01 12:45:42 -0400506gl::Error TextureVk::setCompressedImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400507 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400508 GLenum internalFormat,
509 const gl::Extents &size,
510 const gl::PixelUnpackState &unpack,
511 size_t imageSize,
512 const uint8_t *pixels)
513{
514 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500515 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400516}
517
Jamie Madillc564c072017-06-01 12:45:42 -0400518gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400519 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400520 const gl::Box &area,
521 GLenum format,
522 const gl::PixelUnpackState &unpack,
523 size_t imageSize,
524 const uint8_t *pixels)
525{
526 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500527 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400528}
529
Jamie Madillc564c072017-06-01 12:45:42 -0400530gl::Error TextureVk::copyImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400531 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400532 const gl::Rectangle &sourceArea,
533 GLenum internalFormat,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400534 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400535{
Luc Ferronf299a372018-05-14 14:44:54 -0400536 gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
537 const gl::InternalFormat &internalFormatInfo =
538 gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
539 ANGLE_TRY(setImage(context, index, internalFormat, newImageSize, internalFormatInfo.format,
540 internalFormatInfo.type, gl::PixelUnpackState(), nullptr));
541 return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
542 source);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400543}
544
Jamie Madillc564c072017-06-01 12:45:42 -0400545gl::Error TextureVk::copySubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400546 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400547 const gl::Offset &destOffset,
548 const gl::Rectangle &sourceArea,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400549 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400550{
Luc Ferronf299a372018-05-14 14:44:54 -0400551 const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
552 return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
553}
554
555gl::Error TextureVk::copySubImageImpl(const gl::Context *context,
556 const gl::ImageIndex &index,
557 const gl::Offset &destOffset,
558 const gl::Rectangle &sourceArea,
559 const gl::InternalFormat &internalFormat,
560 gl::Framebuffer *source)
561{
Luc Ferron018709f2018-05-10 13:53:11 -0400562 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
563 gl::Rectangle clippedSourceArea;
564 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
565 &clippedSourceArea))
566 {
567 return gl::NoError();
568 }
569
570 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
571 destOffset.y + sourceArea.y - sourceArea.y, 0);
572
573 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill316c6062018-05-29 10:49:45 -0400574 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronf299a372018-05-14 14:44:54 -0400575 FramebufferVk *framebufferVk = vk::GetImpl(source);
Luc Ferron018709f2018-05-10 13:53:11 -0400576
577 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
578 // change to the pixel buffer.
579 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
580 // when its supported.
Jamie Madill58675012018-05-22 14:54:07 -0400581 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
Luc Ferron018709f2018-05-10 13:53:11 -0400582 context, index, clippedSourceArea, modifiedDestOffset,
Luc Ferronf299a372018-05-14 14:44:54 -0400583 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
Jamie Madill58675012018-05-22 14:54:07 -0400584 framebufferVk));
Luc Ferron018709f2018-05-10 13:53:11 -0400585
Jamie Madill316c6062018-05-29 10:49:45 -0400586 onResourceChanged(renderer);
587 framebufferVk->addReadDependency(this);
Luc Ferron018709f2018-05-10 13:53:11 -0400588 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400589}
590
Luc Ferronfa7503c2018-05-08 11:25:06 -0400591vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
Jamie Madill316c6062018-05-29 10:49:45 -0400592 vk::CommandBuffer **commandBufferOut)
Luc Ferronfa7503c2018-05-08 11:25:06 -0400593{
Luc Ferronc5181702018-05-17 09:44:42 -0400594 ANGLE_TRY(appendWriteResource(renderer, commandBufferOut));
Luc Ferronfa7503c2018-05-08 11:25:06 -0400595 return vk::NoError();
596}
597
Jamie Madillc564c072017-06-01 12:45:42 -0400598gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500599 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400600 size_t levels,
601 GLenum internalFormat,
602 const gl::Extents &size)
603{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400604 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
605 RendererVk *renderer = contextVk->getRenderer();
606 const vk::Format &format = renderer->getFormat(internalFormat);
607 vk::CommandBuffer *commandBuffer = nullptr;
608 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Luc Ferronf6e160f2018-06-12 10:13:57 -0400609 ANGLE_TRY(initImage(contextVk, format, size, static_cast<uint32_t>(levels), commandBuffer));
Luc Ferronfa7503c2018-05-08 11:25:06 -0400610 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400611}
612
Corentin Wallez99d492c2018-02-27 15:17:10 -0500613gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
614 gl::TextureType type,
615 egl::Image *image)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400616{
617 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500618 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400619}
620
Jamie Madill4928b7c2017-06-20 12:57:39 -0400621gl::Error TextureVk::setImageExternal(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500622 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400623 egl::Stream *stream,
624 const egl::Stream::GLTextureDescription &desc)
625{
626 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500627 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400628}
629
// GPU path for mipmap generation: repeatedly vkCmdBlitImage from mip N-1 to
// mip N (all array layers at once), halving the extents each iteration. The
// caller (generateMipmap) has already verified the format supports blitting.
// On exit the whole image is left in TRANSFER_SRC_OPTIMAL layout.
//
// NOTE(review): the error result of getCommandBufferForWrite is discarded
// because this function returns void (the header declares it so); a failure
// here would dereference a null commandBuffer. Consider propagating the error
// like the sibling helpers do.
void TextureVk::generateMipmapWithBlit(RendererVk *renderer)
{
    uint32_t imageLayerCount = GetImageLayerCount(mState.getType());
    const gl::Extents baseLevelExtents = mImage.getExtents();
    vk::CommandBuffer *commandBuffer = nullptr;
    getCommandBufferForWrite(renderer, &commandBuffer);

    // We are able to use blitImage since the image format we are using supports it. This
    // is a faster way we can generate the mips.
    int32_t mipWidth = baseLevelExtents.width;
    int32_t mipHeight = baseLevelExtents.height;

    // Manually manage the image memory barrier because it uses a lot more parameters than our
    // usual one.
    VkImageMemoryBarrier barrier;
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.image = mImage.getImage().getHandle();
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.pNext = nullptr;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount = imageLayerCount;
    // Each barrier below targets exactly one mip level (baseMipLevel is set
    // per-iteration); layers are covered in one shot via layerCount.
    barrier.subresourceRange.levelCount = 1;

    for (uint32_t mipLevel = 1; mipLevel <= mState.getMipmapMaxLevel(); mipLevel++)
    {
        // Next level is half the size in each dimension, clamped to 1.
        int32_t nextMipWidth = std::max<int32_t>(1, mipWidth >> 1);
        int32_t nextMipHeight = std::max<int32_t>(1, mipHeight >> 1);

        // Transition the source level (mipLevel - 1) so the blit can read it.
        barrier.subresourceRange.baseMipLevel = mipLevel - 1;
        barrier.oldLayout = mImage.getCurrentLayout();
        barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

        // We can do it for all layers at once.
        commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                          VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

        // Blit source level -> destination level with linear filtering; depth
        // is fixed at 1 (2D/cube content only).
        VkImageBlit blit = {};
        blit.srcOffsets[0] = {0, 0, 0};
        blit.srcOffsets[1] = {mipWidth, mipHeight, 1};
        blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.srcSubresource.mipLevel = mipLevel - 1;
        blit.srcSubresource.baseArrayLayer = 0;
        blit.srcSubresource.layerCount = imageLayerCount;
        blit.dstOffsets[0] = {0, 0, 0};
        blit.dstOffsets[1] = {nextMipWidth, nextMipHeight, 1};
        blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.dstSubresource.mipLevel = mipLevel;
        blit.dstSubresource.baseArrayLayer = 0;
        blit.dstSubresource.layerCount = imageLayerCount;

        mipWidth = nextMipWidth;
        mipHeight = nextMipHeight;

        commandBuffer->blitImage(mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit,
                                 VK_FILTER_LINEAR);
    }

    // Transition the last mip level to the same layout as all the other ones, so we can declare
    // our whole image layout to be SRC_OPTIMAL.
    barrier.subresourceRange.baseMipLevel = mState.getMipmapMaxLevel();
    barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

    // We can do it for all layers at once.
    commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                      VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

    // This is just changing the internal state of the image helper so that the next call
    // to changeLayoutWithStages will use this layout as the "oldLayout" argument.
    mImage.updateLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
}
706
707gl::Error TextureVk::generateMipmapWithCPU(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400708{
Luc Ferron22695bf2018-05-22 15:52:08 -0400709 ContextVk *contextVk = vk::GetImpl(context);
710 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronc5181702018-05-17 09:44:42 -0400711
Luc Ferronc5181702018-05-17 09:44:42 -0400712 bool newBufferAllocated = false;
Luc Ferronc5181702018-05-17 09:44:42 -0400713 const gl::Extents baseLevelExtents = mImage.getExtents();
Luc Ferron05cd6df2018-05-24 15:51:29 -0400714 uint32_t imageLayerCount = GetImageLayerCount(mState.getType());
715 const angle::Format &angleFormat = mImage.getFormat().textureFormat();
Luc Ferronc5181702018-05-17 09:44:42 -0400716 GLuint sourceRowPitch = baseLevelExtents.width * angleFormat.pixelBytes;
717 size_t baseLevelAllocationSize = sourceRowPitch * baseLevelExtents.height;
718
Luc Ferron22695bf2018-05-22 15:52:08 -0400719 vk::CommandBuffer *commandBuffer = nullptr;
720 getCommandBufferForWrite(renderer, &commandBuffer);
Luc Ferronc5181702018-05-17 09:44:42 -0400721
Luc Ferron22695bf2018-05-22 15:52:08 -0400722 // Requirement of the copyImageToBuffer, the source image must be in SRC_OPTIMAL layout.
723 mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
724 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
725 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);
726
727 size_t totalAllocationSize = baseLevelAllocationSize * imageLayerCount;
728
729 VkBuffer copyBufferHandle;
730 uint8_t *baseLevelBuffers;
731 uint32_t copyBaseOffset;
732
733 // Allocate enough memory to copy every level 0 image (one for each layer of the texture).
734 ANGLE_TRY(mPixelBuffer.allocate(renderer, totalAllocationSize, &baseLevelBuffers,
735 &copyBufferHandle, &copyBaseOffset, &newBufferAllocated));
736
737 // Do only one copy for all layers at once.
Luc Ferronc5181702018-05-17 09:44:42 -0400738 VkBufferImageCopy region;
739 region.bufferImageHeight = baseLevelExtents.height;
Luc Ferron22695bf2018-05-22 15:52:08 -0400740 region.bufferOffset = static_cast<VkDeviceSize>(copyBaseOffset);
Luc Ferronc5181702018-05-17 09:44:42 -0400741 region.bufferRowLength = baseLevelExtents.width;
742 region.imageExtent.width = baseLevelExtents.width;
743 region.imageExtent.height = baseLevelExtents.height;
744 region.imageExtent.depth = 1;
745 region.imageOffset.x = 0;
746 region.imageOffset.y = 0;
747 region.imageOffset.z = 0;
748 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
749 region.imageSubresource.baseArrayLayer = 0;
Luc Ferron22695bf2018-05-22 15:52:08 -0400750 region.imageSubresource.layerCount = imageLayerCount;
Luc Ferronc5181702018-05-17 09:44:42 -0400751 region.imageSubresource.mipLevel = mState.getEffectiveBaseLevel();
752
Luc Ferron22695bf2018-05-22 15:52:08 -0400753 commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
754 1, &region);
Luc Ferronc5181702018-05-17 09:44:42 -0400755
756 ANGLE_TRY(renderer->finish(context));
757
Luc Ferron2f3f4142018-05-30 08:27:19 -0400758 const uint32_t levelCount = getLevelCount();
759
Luc Ferronc5181702018-05-17 09:44:42 -0400760 // We now have the base level available to be manipulated in the baseLevelBuffer pointer.
761 // Generate all the missing mipmaps with the slow path. We can optimize with vkCmdBlitImage
762 // later.
Luc Ferron22695bf2018-05-22 15:52:08 -0400763 // For each layer, use the copied data to generate all the mips.
764 for (GLuint layer = 0; layer < imageLayerCount; layer++)
765 {
766 size_t bufferOffset = layer * baseLevelAllocationSize;
Luc Ferron05cd6df2018-05-24 15:51:29 -0400767
768 ANGLE_TRY(generateMipmapLevelsWithCPU(
Luc Ferron22695bf2018-05-22 15:52:08 -0400769 contextVk, angleFormat, layer, mState.getEffectiveBaseLevel() + 1,
770 mState.getMipmapMaxLevel(), baseLevelExtents.width, baseLevelExtents.height,
771 sourceRowPitch, baseLevelBuffers + bufferOffset));
772 }
Luc Ferronc5181702018-05-17 09:44:42 -0400773
Luc Ferron2f3f4142018-05-30 08:27:19 -0400774 mPixelBuffer.flushUpdatesToImage(renderer, levelCount, &mImage, commandBuffer);
Luc Ferron05cd6df2018-05-24 15:51:29 -0400775 return gl::NoError();
776}
777
// Entry point for glGenerateMipmap. Ensures the image exists (initializing it
// from staged updates if needed), then picks the GPU blit path when the
// format supports blitting, and the CPU path otherwise.
gl::Error TextureVk::generateMipmap(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);

    // Some data is pending, or the image has not been defined at all yet
    if (!mImage.valid())
    {
        // lets initialize the image so we can generate the next levels.
        if (!mPixelBuffer.empty())
        {
            ANGLE_TRY(ensureImageInitialized(contextVk));
            ASSERT(mImage.valid());
        }
        else
        {
            // There is nothing to generate if there is nothing uploaded so far.
            return gl::NoError();
        }
    }

    RendererVk *renderer = contextVk->getRenderer();
    VkFormatProperties imageProperties;
    vk::GetFormatProperties(renderer->getPhysicalDevice(), mImage.getFormat().vkTextureFormat,
                            &imageProperties);

    // Check if the image supports blit. If it does, we can do the mipmap generation on the gpu
    // only.
    // NOTE(review): this tests linearTilingFeatures; if the image was created with
    // optimal tiling (not visible from this file), optimalTilingFeatures would be
    // the correct field to query -- confirm against vk::ImageHelper::init.
    if (IsMaskFlagSet(kBlitFeatureFlags, imageProperties.linearTilingFeatures))
    {
        generateMipmapWithBlit(renderer);
    }
    else
    {
        ANGLE_TRY(generateMipmapWithCPU(context));
    }

    // We're changing this textureVk content, make sure we let the graph know.
    onResourceChanged(renderer);

    return gl::NoError();
}
819
// Changing GL_TEXTURE_BASE_LEVEL is not supported by the Vulkan backend yet
// (getLevelCount also asserts the effective base level is 0).
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
825
// Pbuffer-to-texture binding (eglBindTexImage) is not implemented yet.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
831
// Counterpart of bindTexImage (eglReleaseTexImage); not implemented yet.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
837
Jamie Madill4928b7c2017-06-20 12:57:39 -0400838gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
839 GLenum binding,
Jamie Madill4fd95d52017-04-05 11:22:18 -0400840 const gl::ImageIndex &imageIndex,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400841 FramebufferAttachmentRenderTarget **rtOut)
842{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400843 // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
Jamie Madillcc129372018-04-12 09:13:18 -0400844 ASSERT(imageIndex.getType() == gl::TextureType::_2D);
Jamie Madill26084d02018-04-09 13:44:04 -0400845
846 // Non-zero mip level attachments are an ES 3.0 feature.
Jamie Madillcc129372018-04-12 09:13:18 -0400847 ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());
Jamie Madill26084d02018-04-09 13:44:04 -0400848
849 ContextVk *contextVk = vk::GetImpl(context);
Luc Ferronf6e160f2018-06-12 10:13:57 -0400850 ANGLE_TRY(ensureImageInitialized(contextVk));
Jamie Madill26084d02018-04-09 13:44:04 -0400851
Jamie Madillb79e7bb2017-10-24 13:55:50 -0400852 *rtOut = &mRenderTarget;
853 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400854}
855
Luc Ferronf6e160f2018-06-12 10:13:57 -0400856vk::Error TextureVk::ensureImageInitialized(ContextVk *contextVk)
Jamie Madill26084d02018-04-09 13:44:04 -0400857{
Luc Ferron10434f62018-04-24 10:06:37 -0400858 if (mImage.valid() && mPixelBuffer.empty())
859 {
860 return vk::NoError();
861 }
Luc Ferronf6e160f2018-06-12 10:13:57 -0400862 RendererVk *renderer = contextVk->getRenderer();
Jamie Madill26084d02018-04-09 13:44:04 -0400863 vk::CommandBuffer *commandBuffer = nullptr;
Luc Ferronfa7503c2018-05-08 11:25:06 -0400864 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400865
Luc Ferron2f3f4142018-05-30 08:27:19 -0400866 const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
867 const gl::Extents &baseLevelExtents = baseLevelDesc.size;
868 const uint32_t levelCount = getLevelCount();
869
Jamie Madill26084d02018-04-09 13:44:04 -0400870 if (!mImage.valid())
871 {
Jamie Madill26084d02018-04-09 13:44:04 -0400872 const vk::Format &format =
873 renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
Jamie Madill26084d02018-04-09 13:44:04 -0400874
Luc Ferronf6e160f2018-06-12 10:13:57 -0400875 ANGLE_TRY(initImage(contextVk, format, baseLevelExtents, levelCount, commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400876 }
877
Luc Ferron2f3f4142018-05-30 08:27:19 -0400878 ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, levelCount, &mImage, commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400879 return vk::NoError();
880}
881
Luc Ferron4bba74f2018-04-19 14:40:45 -0400882gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
Geoff Lang22416862016-06-08 16:14:36 -0700883{
Luc Ferron20610902018-04-19 14:41:13 -0400884 if (dirtyBits.none() && mSampler.valid())
885 {
886 return gl::NoError();
887 }
888
889 ContextVk *contextVk = vk::GetImpl(context);
890 if (mSampler.valid())
891 {
892 RendererVk *renderer = contextVk->getRenderer();
Jamie Madillc57ee252018-05-30 19:53:48 -0400893 renderer->releaseObject(getStoredQueueSerial(), &mSampler);
Luc Ferron20610902018-04-19 14:41:13 -0400894 }
895
896 const gl::SamplerState &samplerState = mState.getSamplerState();
897
898 // Create a simple sampler. Force basic parameter settings.
899 VkSamplerCreateInfo samplerInfo;
900 samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
901 samplerInfo.pNext = nullptr;
902 samplerInfo.flags = 0;
903 samplerInfo.magFilter = gl_vk::GetFilter(samplerState.magFilter);
904 samplerInfo.minFilter = gl_vk::GetFilter(samplerState.minFilter);
Luc Ferron66410532018-04-20 12:47:45 -0400905 samplerInfo.mipmapMode = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
Luc Ferron20610902018-04-19 14:41:13 -0400906 samplerInfo.addressModeU = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
907 samplerInfo.addressModeV = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
908 samplerInfo.addressModeW = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
909 samplerInfo.mipLodBias = 0.0f;
910 samplerInfo.anisotropyEnable = VK_FALSE;
911 samplerInfo.maxAnisotropy = 1.0f;
912 samplerInfo.compareEnable = VK_FALSE;
913 samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
Luc Ferron66410532018-04-20 12:47:45 -0400914 samplerInfo.minLod = samplerState.minLod;
915 samplerInfo.maxLod = samplerState.maxLod;
Luc Ferron20610902018-04-19 14:41:13 -0400916 samplerInfo.borderColor = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
917 samplerInfo.unnormalizedCoordinates = VK_FALSE;
918
919 ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
Luc Ferron4bba74f2018-04-19 14:40:45 -0400920 return gl::NoError();
Geoff Lang22416862016-06-08 16:14:36 -0700921}
922
// Multisample texture storage (ES 3.1) is not implemented yet.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
933
// Robust resource initialization of texture contents is not implemented.
// NOTE(review): this stub reports success (NoError) rather than failure, so
// callers proceed as if the contents were initialized.
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
940
// Accessor for the backing image helper; only valid once the image has been
// initialized (callers must ensure this, e.g. via ensureImageInitialized).
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
946
947const vk::ImageView &TextureVk::getImageView() const
948{
Jamie Madill93edca12018-03-30 10:43:18 -0400949 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400950
951 const GLenum minFilter = mState.getSamplerState().minFilter;
952 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
953 {
954 return mBaseLevelImageView;
955 }
956
957 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400958}
959
// Accessor for the Vulkan sampler; only valid after syncState has created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
965
// Creates the backing VkImage for this texture: allocates the image and its
// device-local memory, builds the two image views (full mip chain and
// base-level-only), and clears every level to opaque black. The Vulkan init
// order (image -> memory -> views -> clear) is mandatory, so the statement
// sequence here must not be reordered.
vk::Error TextureVk::initImage(ContextVk *contextVk,
                               const vk::Format &format,
                               const gl::Extents &extents,
                               const uint32_t levelCount,
                               vk::CommandBuffer *commandBuffer)
{
    const RendererVk *renderer = contextVk->getRenderer();
    const VkDevice device = renderer->getDevice();

    // Every texture may be sampled, copied in both directions, and rendered to.
    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // Renderable textures cannot have a swizzle.
    ASSERT(!contextVk->getTextureCaps().get(format.internalFormat).textureAttachment ||
           !mappedSwizzle.swizzleRequired());

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    VkClearColorValue black = {{0, 0, 0, 1.0f}};
    mImage.clearColor(black, 0, levelCount, commandBuffer);
    return vk::NoError();
}
1003
// Hands the image and both of its views to the renderer for deferred
// destruction (once the GPU is done with them), then notifies observers that
// this texture's storage changed.
// NOTE(review): the image is released against getCurrentQueueSerial() while
// the views use getStoredQueueSerial() -- confirm this asymmetry is intended.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mBaseLevelImageView);
    renderer->releaseObject(getStoredQueueSerial(), &mMipmapImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
1011
Luc Ferron66410532018-04-20 12:47:45 -04001012uint32_t TextureVk::getLevelCount() const
1013{
1014 ASSERT(mState.getEffectiveBaseLevel() == 0);
1015
1016 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
1017 return mState.getMipmapMaxLevel() + 1;
1018}
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001019} // namespace rx