blob: b106f92cf7bebd77c38c6341d4e5c88d5acdba33 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Luc Ferronc5181702018-05-17 09:44:42 -040013#include "image_util/generatemip.inl"
Jamie Madill035fd6b2017-10-03 15:43:22 -040014#include "libANGLE/Context.h"
15#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040016#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040017#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050018#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
Luc Ferron5164b792018-03-06 09:10:12 -050022namespace
23{
Jamie Madill93edca12018-03-30 10:43:18 -040024void MapSwizzleState(GLenum internalFormat,
25 const gl::SwizzleState &swizzleState,
26 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050027{
28 switch (internalFormat)
29 {
Jamie Madill26084d02018-04-09 13:44:04 -040030 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040031 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
34 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050035 break;
Jamie Madill26084d02018-04-09 13:44:04 -040036 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040037 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
40 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050041 break;
Jamie Madill26084d02018-04-09 13:44:04 -040042 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040043 swizzleStateOut->swizzleRed = GL_ZERO;
44 swizzleStateOut->swizzleGreen = GL_ZERO;
45 swizzleStateOut->swizzleBlue = GL_ZERO;
46 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040047 break;
Luc Ferron5164b792018-03-06 09:10:12 -050048 default:
Jamie Madill93edca12018-03-30 10:43:18 -040049 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050050 break;
51 }
52}
Jamie Madill26084d02018-04-09 13:44:04 -040053
// The staging buffer serves as both the source of buffer-to-image copies and
// the destination of CPU readbacks, hence both transfer usage bits.
constexpr VkBufferUsageFlags kStagingBufferFlags =
    (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
// Initial staging buffer size in bytes (16 KiB).
constexpr size_t kStagingBufferSize = 1024 * 16;

// A format must advertise both blit-src and blit-dst support for
// vkCmdBlitImage to be usable with it.
constexpr VkFormatFeatureFlags kBlitFeatureFlags =
    VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
Luc Ferron5164b792018-03-06 09:10:12 -050060} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040061
Jamie Madill26084d02018-04-09 13:44:04 -040062// StagingStorage implementation.
// Constructs the pixel staging storage used to accumulate pending image
// updates before they are flushed to the GPU image.
PixelBuffer::PixelBuffer(RendererVk *renderer)
    : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4, renderer);
}
70
Jamie Madilla7be1f72018-04-13 15:16:26 -040071PixelBuffer::~PixelBuffer()
Jamie Madill26084d02018-04-09 13:44:04 -040072{
73}
74
// Hands the staging buffer's resources back to the renderer for cleanup.
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
79
Luc Ferron2f3f4142018-05-30 08:27:19 -040080void PixelBuffer::removeStagedUpdates(const gl::ImageIndex &index)
81{
82 // Find any staged updates for this index and removes them from the pending list.
83 uint32_t levelIndex = static_cast<uint32_t>(index.getLevelIndex());
84 uint32_t layerIndex = static_cast<uint32_t>(index.getLayerIndex());
85 auto removeIfStatement = [levelIndex, layerIndex](SubresourceUpdate &update) {
86 return update.copyRegion.imageSubresource.mipLevel == levelIndex &&
87 update.copyRegion.imageSubresource.baseArrayLayer == layerIndex;
88 };
89 mSubresourceUpdates.erase(
90 std::remove_if(mSubresourceUpdates.begin(), mSubresourceUpdates.end(), removeIfStatement),
91 mSubresourceUpdates.end());
92}
93
// Converts user pixel data into the image's storage format, writes it into
// the staging buffer, and records a VkBufferImageCopy that will later be
// replayed by flushUpdatesToImage(). Does not touch the GPU image itself.
angle::Result PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                                  const gl::ImageIndex &index,
                                                  const gl::Extents &extents,
                                                  const gl::Offset &offset,
                                                  const gl::InternalFormat &formatInfo,
                                                  const gl::PixelUnpackState &unpack,
                                                  GLenum type,
                                                  const uint8_t *pixels)
{
    // Compute the layout of the caller-provided data from the unpack state.
    GLuint inputRowPitch = 0;
    ANGLE_VK_CHECK_MATH(contextVk, formatInfo.computeRowPitch(type, extents.width, unpack.alignment,
                                                              unpack.rowLength, &inputRowPitch));

    GLuint inputDepthPitch = 0;
    ANGLE_VK_CHECK_MATH(contextVk, formatInfo.computeDepthPitch(extents.height, unpack.imageHeight,
                                                                inputRowPitch, &inputDepthPitch));

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_VK_CHECK_MATH(contextVk,
                        formatInfo.computeSkipBytes(type, inputRowPitch, inputDepthPitch, unpack,
                                                    applySkipImages, &inputSkipBytes));

    RendererVk *renderer = contextVk->getRenderer();

    // The staged data is laid out tightly packed in the image's actual
    // (Vulkan) storage format, which may differ from the GL input format.
    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    size_t outputRowPitch   = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer    = nullptr;
    bool newBufferAllocated    = false;
    VkDeviceSize stagingOffset = 0;
    size_t allocationSize      = outputDepthPitch * extents.depth;
    ANGLE_TRY(mStagingBuffer.allocate(contextVk, allocationSize, &stagingPointer, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    // Skip over any rows/images the unpack state tells us to ignore.
    const uint8_t *source = pixels + inputSkipBytes;

    // The load function converts from the GL input layout to the storage
    // format while copying into the staging memory.
    LoadImageFunctionInfo loadFunction = vkFormat.textureLoadFunctions(type);

    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    // Record the copy region so the flush can target the right subresource.
    VkBufferImageCopy copy;

    copy.bufferOffset                    = stagingOffset;
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return angle::Result::Continue();
}
160
// Reads back a region of |framebufferVk| on the CPU into the staging buffer
// and queues a copy of it into this texture's subresource at |index| /
// |dstOffset|. Used by copyImage/copySubImage to favor conformance over
// speed until a vkCmdBlitImage path exists.
angle::Result PixelBuffer::stageSubresourceUpdateFromFramebuffer(
    const gl::Context *context,
    const gl::ImageIndex &index,
    const gl::Rectangle &sourceArea,
    const gl::Offset &dstOffset,
    const gl::Extents &dstExtent,
    const gl::InternalFormat &formatInfo,
    FramebufferVk *framebufferVk)
{
    ContextVk *contextVk = vk::GetImpl(context);

    // If the extents and offset is outside the source image, we need to clip.
    gl::Rectangle clippedRectangle;
    const gl::Extents readExtents = framebufferVk->getReadImageExtents();
    if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
                       &clippedRectangle))
    {
        // Empty source area, nothing to do.
        return angle::Result::Continue();
    }

    // NOTE(review): this queries the *draw* FBO's flip state for a read-side
    // operation — confirm this is intentional.
    bool isViewportFlipEnabled = contextVk->isViewportFlipEnabledForDrawFBO();
    if (isViewportFlipEnabled)
    {
        // Flip the source rectangle vertically to match the flipped FBO.
        clippedRectangle.y = readExtents.height - clippedRectangle.y - clippedRectangle.height;
    }

    // 1- obtain a buffer handle to copy to
    RendererVk *renderer = contextVk->getRenderer();

    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();
    LoadImageFunctionInfo loadFunction = vkFormat.textureLoadFunctions(formatInfo.type);

    size_t outputRowPitch   = storageFormat.pixelBytes * clippedRectangle.width;
    size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer    = nullptr;
    bool newBufferAllocated    = false;
    VkDeviceSize stagingOffset = 0;

    // The destination is only one layer deep.
    size_t allocationSize = outputDepthPitch;
    ANGLE_TRY(mStagingBuffer.allocate(contextVk, allocationSize, &stagingPointer, &bufferHandle,
                                      &stagingOffset, &newBufferAllocated));

    gl::PixelPackState pixelPackState = gl::PixelPackState();
    // TODO(lucferron): The pixel pack state alignment should probably be 1 instead of 4.
    // http://anglebug.com/2718

    if (isViewportFlipEnabled)
    {
        // Rows are read back bottom-up so the staged data ends up top-down.
        pixelPackState.reverseRowOrder = !pixelPackState.reverseRowOrder;
    }

    const angle::Format &copyFormat =
        GetFormatFromFormatType(formatInfo.internalFormat, formatInfo.type);
    PackPixelsParams params(clippedRectangle, copyFormat, static_cast<GLuint>(outputRowPitch),
                            pixelPackState, nullptr, 0);

    // 2- copy the source image region to the pixel buffer using a cpu readback
    if (loadFunction.requiresConversion)
    {
        // When a conversion is required, we need to use the loadFunction to read from a temporary
        // buffer instead so its an even slower path.
        size_t bufferSize =
            storageFormat.pixelBytes * clippedRectangle.width * clippedRectangle.height;
        angle::MemoryBuffer *memoryBuffer = nullptr;
        ANGLE_VK_CHECK_ALLOC(contextVk, context->getScratchBuffer(bufferSize, &memoryBuffer));

        // Read into the scratch buffer
        ANGLE_TRY(framebufferVk->readPixelsImpl(
            contextVk, clippedRectangle, params, VK_IMAGE_ASPECT_COLOR_BIT,
            framebufferVk->getColorReadRenderTarget(), memoryBuffer->data()));

        // Load from scratch buffer to our pixel buffer
        loadFunction.loadFunction(clippedRectangle.width, clippedRectangle.height, 1,
                                  memoryBuffer->data(), outputRowPitch, 0, stagingPointer,
                                  outputRowPitch, 0);
    }
    else
    {
        // We read directly from the framebuffer into our pixel buffer.
        ANGLE_TRY(framebufferVk->readPixelsImpl(
            contextVk, clippedRectangle, params, VK_IMAGE_ASPECT_COLOR_BIT,
            framebufferVk->getColorReadRenderTarget(), stagingPointer));
    }

    // 3- enqueue the destination image subresource update
    VkBufferImageCopy copyToImage;
    copyToImage.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copyToImage.bufferRowLength                 = 0;  // Tightly packed data can be specified as 0.
    copyToImage.bufferImageHeight               = clippedRectangle.height;
    copyToImage.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copyToImage.imageSubresource.mipLevel       = index.getLevelIndex();
    copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copyToImage.imageSubresource.layerCount     = index.getLayerCount();
    gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
    gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
    return angle::Result::Continue();
}
267
// Thin forwarder to the staging buffer's allocator; exposes staging memory
// to callers that fill in pixel data themselves.
angle::Result PixelBuffer::allocate(ContextVk *contextVk,
                                    size_t sizeInBytes,
                                    uint8_t **ptrOut,
                                    VkBuffer *handleOut,
                                    VkDeviceSize *offsetOut,
                                    bool *newBufferAllocatedOut)
{
    return mStagingBuffer.allocate(contextVk, sizeInBytes, ptrOut, handleOut, offsetOut,
                                   newBufferAllocatedOut);
}
278
Jamie Madill21061022018-07-12 23:56:30 -0400279angle::Result PixelBuffer::flushUpdatesToImage(ContextVk *contextVk,
280 uint32_t levelCount,
281 vk::ImageHelper *image,
282 vk::CommandBuffer *commandBuffer)
Jamie Madill26084d02018-04-09 13:44:04 -0400283{
Jamie Madill20fa8d52018-04-15 10:09:32 -0400284 if (mSubresourceUpdates.empty())
Jamie Madill26084d02018-04-09 13:44:04 -0400285 {
Jamie Madill21061022018-07-12 23:56:30 -0400286 return angle::Result::Continue();
Jamie Madill26084d02018-04-09 13:44:04 -0400287 }
288
Jamie Madill21061022018-07-12 23:56:30 -0400289 ANGLE_TRY(mStagingBuffer.flush(contextVk));
Jamie Madill20fa8d52018-04-15 10:09:32 -0400290
Luc Ferron2f3f4142018-05-30 08:27:19 -0400291 std::vector<SubresourceUpdate> updatesToKeep;
292
Jamie Madill20fa8d52018-04-15 10:09:32 -0400293 for (const SubresourceUpdate &update : mSubresourceUpdates)
294 {
295 ASSERT(update.bufferHandle != VK_NULL_HANDLE);
Luc Ferron1a186b12018-04-24 15:25:35 -0400296
Luc Ferron2f3f4142018-05-30 08:27:19 -0400297 const uint32_t updateMipLevel = update.copyRegion.imageSubresource.mipLevel;
298 // It's possible we've accumulated updates that are no longer applicable if the image has
299 // never been flushed but the image description has changed. Check if this level exist for
300 // this image.
301 if (updateMipLevel >= levelCount)
302 {
303 updatesToKeep.emplace_back(update);
304 continue;
305 }
306
Luc Ferron1a186b12018-04-24 15:25:35 -0400307 // Conservatively flush all writes to the image. We could use a more restricted barrier.
308 // Do not move this above the for loop, otherwise multiple updates can have race conditions
309 // and not be applied correctly as seen i:
310 // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
311 image->changeLayoutWithStages(
312 VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
313 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);
314
Jamie Madill20fa8d52018-04-15 10:09:32 -0400315 commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
316 image->getCurrentLayout(), 1, &update.copyRegion);
317 }
318
Luc Ferron2f3f4142018-05-30 08:27:19 -0400319 // Only remove the updates that were actually applied to the image.
320 mSubresourceUpdates = std::move(updatesToKeep);
321
322 if (mSubresourceUpdates.empty())
323 {
Jamie Madill21061022018-07-12 23:56:30 -0400324 mStagingBuffer.releaseRetainedBuffers(contextVk->getRenderer());
Luc Ferron2f3f4142018-05-30 08:27:19 -0400325 }
326 else
327 {
328 WARN() << "Internal Vulkan bufffer could not be released. This is likely due to having "
329 "extra images defined in the Texture.";
330 }
Jamie Madill20fa8d52018-04-15 10:09:32 -0400331
Jamie Madill21061022018-07-12 23:56:30 -0400332 return angle::Result::Continue();
Jamie Madill26084d02018-04-09 13:44:04 -0400333}
334
// Returns true when there are no pending subresource updates to flush.
bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}
339
Jamie Madill21061022018-07-12 23:56:30 -0400340angle::Result PixelBuffer::stageSubresourceUpdateAndGetData(ContextVk *contextVk,
341 size_t allocationSize,
342 const gl::ImageIndex &imageIndex,
343 const gl::Extents &extents,
344 const gl::Offset &offset,
345 uint8_t **destData)
Luc Ferronc5181702018-05-17 09:44:42 -0400346{
347 VkBuffer bufferHandle;
Jamie Madill4c310832018-08-29 13:43:17 -0400348 VkDeviceSize stagingOffset = 0;
Jamie Madill7f232932018-09-12 11:03:06 -0400349 bool newBufferAllocated = false;
Jamie Madill21061022018-07-12 23:56:30 -0400350 ANGLE_TRY(mStagingBuffer.allocate(contextVk, allocationSize, destData, &bufferHandle,
Luc Ferronc5181702018-05-17 09:44:42 -0400351 &stagingOffset, &newBufferAllocated));
352
353 VkBufferImageCopy copy;
Jamie Madill4c310832018-08-29 13:43:17 -0400354 copy.bufferOffset = stagingOffset;
Luc Ferronc5181702018-05-17 09:44:42 -0400355 copy.bufferRowLength = extents.width;
356 copy.bufferImageHeight = extents.height;
357 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
358 copy.imageSubresource.mipLevel = imageIndex.getLevelIndex();
359 copy.imageSubresource.baseArrayLayer = imageIndex.hasLayer() ? imageIndex.getLayerIndex() : 0;
360 copy.imageSubresource.layerCount = imageIndex.getLayerCount();
361
362 gl_vk::GetOffset(offset, &copy.imageOffset);
363 gl_vk::GetExtent(extents, &copy.imageExtent);
364
365 mSubresourceUpdates.emplace_back(bufferHandle, copy);
366
Jamie Madill21061022018-07-12 23:56:30 -0400367 return angle::Result::Continue();
Luc Ferronc5181702018-05-17 09:44:42 -0400368}
369
// Generates mip levels [firstMipLevel, maxMipLevel] for one array layer on
// the CPU. Each level is downsampled from the previous one with the format's
// mipGenerationFunction directly into freshly staged memory, so the results
// are applied to the image on the next flush.
angle::Result TextureVk::generateMipmapLevelsWithCPU(ContextVk *contextVk,
                                                     const angle::Format &sourceFormat,
                                                     GLuint layer,
                                                     GLuint firstMipLevel,
                                                     GLuint maxMipLevel,
                                                     const size_t sourceWidth,
                                                     const size_t sourceHeight,
                                                     const size_t sourceRowPitch,
                                                     uint8_t *sourceData)
{
    size_t previousLevelWidth    = sourceWidth;
    size_t previousLevelHeight   = sourceHeight;
    uint8_t *previousLevelData   = sourceData;
    size_t previousLevelRowPitch = sourceRowPitch;

    for (GLuint currentMipLevel = firstMipLevel; currentMipLevel <= maxMipLevel; currentMipLevel++)
    {
        // Compute next level width and height, clamped to a minimum of 1.
        size_t mipWidth  = std::max<size_t>(1, previousLevelWidth >> 1);
        size_t mipHeight = std::max<size_t>(1, previousLevelHeight >> 1);

        // With the width and height of the next mip, we can allocate the next buffer we need.
        uint8_t *destData   = nullptr;
        size_t destRowPitch = mipWidth * sourceFormat.pixelBytes;

        size_t mipAllocationSize = destRowPitch * mipHeight;
        gl::Extents mipLevelExtents(static_cast<int>(mipWidth), static_cast<int>(mipHeight), 1);

        // Stage the update and get a pointer to write the mip data into.
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
            contextVk, mipAllocationSize,
            gl::ImageIndex::MakeFromType(mState.getType(), currentMipLevel, layer), mipLevelExtents,
            gl::Offset(), &destData));

        // Generate the mipmap into that new buffer
        sourceFormat.mipGenerationFunction(previousLevelWidth, previousLevelHeight, 1,
                                           previousLevelData, previousLevelRowPitch, 0, destData,
                                           destRowPitch, 0);

        // Swap for the next iteration: the freshly written level becomes the
        // source of the next downsample.
        previousLevelWidth    = mipWidth;
        previousLevelHeight   = mipHeight;
        previousLevelData     = destData;
        previousLevelRowPitch = destRowPitch;
    }

    return angle::Result::Continue();
}
417
Jamie Madilla7be1f72018-04-13 15:16:26 -0400418PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400419{
420}
421
// Captures the staging buffer handle and the buffer-to-image copy parameters
// for one pending subresource update.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}
427
Jamie Madilla7be1f72018-04-13 15:16:26 -0400428PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400429
Jamie Madill26084d02018-04-09 13:44:04 -0400430// TextureVk implementation.
// Constructs the Vulkan texture backend. The render target wraps this
// texture's image and base-level view; the pixel buffer stages CPU updates.
TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
    : TextureImpl(state),
      mRenderTarget(&mImage, &mBaseLevelImageView, this, 0),
      mPixelBuffer(renderer)
{
}
437
438TextureVk::~TextureVk()
439{
440}
441
// Releases the image, the sampler (against the stored queue serial, so it is
// not destroyed while possibly still in use), and the staging pixel buffer.
gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mSampler);

    mPixelBuffer.release(renderer);
    return gl::NoError();
}
453
// Implements glTexImage*: redefines the image for the new size/format and
// stages the initial pixel data (if any) for upload on the next flush.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    ANGLE_TRY(redefineImage(context, index, formatInfo, size));

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    finishCurrentCommands(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
489
// Implements glTexSubImage*: stages the region update for later flush.
// NOTE(review): |unpackBuffer| is currently unused — PBO uploads appear to be
// unhandled on this path; confirm.
gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 gl::Buffer *unpackBuffer,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk                 = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    finishCurrentCommands(contextVk->getRenderer());

    return gl::NoError();
}
510
// Compressed texture uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
522
// Compressed texture sub-image uploads are not implemented yet either.
gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
534
// Implements glCopyTexImage2D: redefines the destination level to the source
// area's size, then delegates the actual copy to copySubImageImpl.
gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
    const gl::InternalFormat &internalFormatInfo =
        gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
    ANGLE_TRY(redefineImage(context, index, internalFormatInfo, newImageSize));
    return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
                            source);
}
548
// Implements glCopyTexSubImage2D: copies into the existing image using the
// texture's current base-level format.
gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
    return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
}
558
// Implements glCopyTextureCHROMIUM: redefines the destination to match the
// source level's size, then copies the whole source level.
gl::Error TextureVk::copyTexture(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 GLenum internalFormat,
                                 GLenum type,
                                 size_t sourceLevel,
                                 bool unpackFlipY,
                                 bool unpackPremultiplyAlpha,
                                 bool unpackUnmultiplyAlpha,
                                 const gl::Texture *source)
{
    TextureVk *sourceVk = vk::GetImpl(source);
    const gl::ImageDesc &sourceImageDesc =
        sourceVk->mState.getImageDesc(NonCubeTextureTypeToTarget(source->getType()), sourceLevel);
    // Copy the full extent of the source level.
    gl::Rectangle sourceArea(0, 0, sourceImageDesc.size.width, sourceImageDesc.size.height);

    const gl::InternalFormat &destFormatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    ANGLE_TRY(redefineImage(context, index, destFormatInfo, sourceImageDesc.size));

    return copySubTextureImpl(vk::GetImpl(context), index, gl::kOffsetZero, destFormatInfo,
                              sourceLevel, sourceArea, unpackFlipY, unpackPremultiplyAlpha,
                              unpackUnmultiplyAlpha, sourceVk);
}
582
// Implements glCopySubTextureCHROMIUM: copies a sub-region into an existing
// destination level, using that level's already-defined format.
gl::Error TextureVk::copySubTexture(const gl::Context *context,
                                    const gl::ImageIndex &index,
                                    const gl::Offset &destOffset,
                                    size_t sourceLevel,
                                    const gl::Rectangle &sourceArea,
                                    bool unpackFlipY,
                                    bool unpackPremultiplyAlpha,
                                    bool unpackUnmultiplyAlpha,
                                    const gl::Texture *source)
{
    gl::TextureTarget target = index.getTarget();
    size_t level             = static_cast<size_t>(index.getLevelIndex());
    const gl::InternalFormat &destFormatInfo = *mState.getImageDesc(target, level).format.info;
    return copySubTextureImpl(vk::GetImpl(context), index, destOffset, destFormatInfo, sourceLevel,
                              sourceArea, unpackFlipY, unpackPremultiplyAlpha,
                              unpackUnmultiplyAlpha, vk::GetImpl(source));
}
600
Jamie Madill21061022018-07-12 23:56:30 -0400601angle::Result TextureVk::copySubImageImpl(const gl::Context *context,
602 const gl::ImageIndex &index,
603 const gl::Offset &destOffset,
604 const gl::Rectangle &sourceArea,
605 const gl::InternalFormat &internalFormat,
606 gl::Framebuffer *source)
Luc Ferronf299a372018-05-14 14:44:54 -0400607{
Luc Ferron018709f2018-05-10 13:53:11 -0400608 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
609 gl::Rectangle clippedSourceArea;
610 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
611 &clippedSourceArea))
612 {
Jamie Madill21061022018-07-12 23:56:30 -0400613 return angle::Result::Continue();
Luc Ferron018709f2018-05-10 13:53:11 -0400614 }
615
616 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
617 destOffset.y + sourceArea.y - sourceArea.y, 0);
618
Frank Henigmand9618bf2018-06-24 19:57:31 -0400619 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill316c6062018-05-29 10:49:45 -0400620 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronf299a372018-05-14 14:44:54 -0400621 FramebufferVk *framebufferVk = vk::GetImpl(source);
Luc Ferron018709f2018-05-10 13:53:11 -0400622
623 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
624 // change to the pixel buffer.
625 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
626 // when its supported.
Jamie Madill58675012018-05-22 14:54:07 -0400627 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
Luc Ferron018709f2018-05-10 13:53:11 -0400628 context, index, clippedSourceArea, modifiedDestOffset,
Luc Ferronf299a372018-05-14 14:44:54 -0400629 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
Jamie Madill58675012018-05-22 14:54:07 -0400630 framebufferVk));
Luc Ferron018709f2018-05-10 13:53:11 -0400631
Jamie Madille2d22702018-09-19 08:11:48 -0400632 finishCurrentCommands(renderer);
Jamie Madill316c6062018-05-29 10:49:45 -0400633 framebufferVk->addReadDependency(this);
Jamie Madill21061022018-07-12 23:56:30 -0400634 return angle::Result::Continue();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400635}
636
// Slow-path implementation for the CHROMIUM copy-texture entry points: reads the source region
// back to the CPU, converts it pixel-by-pixel with the front-end read/write functions, and
// stages the converted data into this texture's pixel buffer for a later upload.
gl::Error TextureVk::copySubTextureImpl(ContextVk *contextVk,
                                        const gl::ImageIndex &index,
                                        const gl::Offset &destOffset,
                                        const gl::InternalFormat &destFormat,
                                        size_t sourceLevel,
                                        const gl::Rectangle &sourceArea,
                                        bool unpackFlipY,
                                        bool unpackPremultiplyAlpha,
                                        bool unpackUnmultiplyAlpha,
                                        TextureVk *source)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Read back the requested region of the source texture into a staging buffer.
    uint8_t *sourceData = nullptr;
    ANGLE_TRY(source->copyImageDataToBuffer(contextVk, sourceLevel, sourceArea, &sourceData));

    // Full GPU stall: the readback must complete before sourceData is touched on the CPU.
    ANGLE_TRY(renderer->finish(contextVk));

    // Using the front-end ANGLE format for the colorRead and colorWrite functions. Otherwise
    // emulated formats like luminance-alpha would not know how to interpret the data.
    const angle::Format &sourceAngleFormat = source->getImage().getFormat().angleFormat();
    const angle::Format &destAngleFormat =
        renderer->getFormat(destFormat.sizedInternalFormat).angleFormat();
    size_t destinationAllocationSize =
        sourceArea.width * sourceArea.height * destAngleFormat.pixelBytes;

    // Allocate memory in the destination texture for the copy/conversion.
    uint8_t *destData = nullptr;
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
        contextVk, destinationAllocationSize, index,
        gl::Extents(sourceArea.width, sourceArea.height, 1), destOffset, &destData));

    // Source and dest data is tightly packed.
    GLuint sourceDataRowPitch = sourceArea.width * sourceAngleFormat.pixelBytes;
    GLuint destDataRowPitch = sourceArea.width * destAngleFormat.pixelBytes;

    // CPU conversion honoring the unpack flip/premultiply options.
    CopyImageCHROMIUM(sourceData, sourceDataRowPitch, sourceAngleFormat.pixelBytes,
                      sourceAngleFormat.pixelReadFunction, destData, destDataRowPitch,
                      destAngleFormat.pixelBytes, destAngleFormat.pixelWriteFunction,
                      destFormat.format, destFormat.componentType, sourceArea.width,
                      sourceArea.height, unpackFlipY, unpackPremultiplyAlpha,
                      unpackUnmultiplyAlpha);

    // Create a new graph node to store image initialization commands.
    finishCurrentCommands(contextVk->getRenderer());

    return angle::Result::Continue();
}
686
Jamie Madill21061022018-07-12 23:56:30 -0400687angle::Result TextureVk::getCommandBufferForWrite(ContextVk *contextVk,
688 vk::CommandBuffer **commandBufferOut)
Luc Ferronfa7503c2018-05-08 11:25:06 -0400689{
Jamie Madille2d22702018-09-19 08:11:48 -0400690 ANGLE_TRY(recordCommands(contextVk, commandBufferOut));
Jamie Madill21061022018-07-12 23:56:30 -0400691 return angle::Result::Continue();
Luc Ferronfa7503c2018-05-08 11:25:06 -0400692}
693
Jamie Madillc564c072017-06-01 12:45:42 -0400694gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500695 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400696 size_t levels,
697 GLenum internalFormat,
698 const gl::Extents &size)
699{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400700 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
701 RendererVk *renderer = contextVk->getRenderer();
702 const vk::Format &format = renderer->getFormat(internalFormat);
703 vk::CommandBuffer *commandBuffer = nullptr;
Jamie Madill21061022018-07-12 23:56:30 -0400704 ANGLE_TRY(getCommandBufferForWrite(contextVk, &commandBuffer));
Jamie Madill3e29cf32018-08-31 17:19:17 -0400705
706 if (mImage.valid())
707 {
708 releaseImage(context, renderer);
709 }
710
Luc Ferronf6e160f2018-06-12 10:13:57 -0400711 ANGLE_TRY(initImage(contextVk, format, size, static_cast<uint32_t>(levels), commandBuffer));
Luc Ferronfa7503c2018-05-08 11:25:06 -0400712 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400713}
714
// EGLImage-backed textures are not yet supported by the Vulkan backend.
gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
722
// External (EGLStream) texture images are not yet supported by the Vulkan backend.
gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
731
Geoff Langd691aee2018-07-11 16:32:06 -0400732angle::Result TextureVk::redefineImage(const gl::Context *context,
733 const gl::ImageIndex &index,
734 const gl::InternalFormat &internalFormat,
735 const gl::Extents &size)
736{
737 ContextVk *contextVk = vk::GetImpl(context);
738 RendererVk *renderer = contextVk->getRenderer();
739
740 // If there is any staged changes for this index, we can remove them since we're going to
741 // override them with this call.
742 mPixelBuffer.removeStagedUpdates(index);
743
744 if (mImage.valid())
745 {
746 const vk::Format &vkFormat = renderer->getFormat(internalFormat.sizedInternalFormat);
747
748 // Calculate the expected size for the index we are defining. If the size is different from
749 // the given size, or the format is different, we are redefining the image so we must
750 // release it.
751 if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
752 {
753 releaseImage(context, renderer);
754 }
755 }
756
757 return angle::Result::Continue();
758}
759
760angle::Result TextureVk::copyImageDataToBuffer(ContextVk *contextVk,
761 size_t sourceLevel,
762 const gl::Rectangle &sourceArea,
763 uint8_t **outDataPtr)
764{
765 if (sourceLevel != 0)
766 {
767 WARN() << "glCopyTextureCHROMIUM with sourceLevel != 0 not implemented.";
768 return angle::Result::Stop();
769 }
770
771 // Make sure the source is initialized and it's images are flushed.
772 ANGLE_TRY(ensureImageInitialized(contextVk));
773
774 const angle::Format &angleFormat = getImage().getFormat().textureFormat();
775 const gl::Extents imageSize =
776 mState.getImageDesc(NonCubeTextureTypeToTarget(mState.getType()), sourceLevel).size;
777 size_t sourceCopyAllocationSize = sourceArea.width * sourceArea.height * angleFormat.pixelBytes;
778
779 vk::CommandBuffer *commandBuffer = nullptr;
780 ANGLE_TRY(getCommandBufferForWrite(contextVk, &commandBuffer));
781
782 // Requirement of the copyImageToBuffer, the source image must be in SRC_OPTIMAL layout.
783 bool newBufferAllocated = false;
784 mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
785 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
786 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);
787
788 // Allocate enough memory to copy the sourceArea region of the source texture into its pixel
789 // buffer.
790 VkBuffer copyBufferHandle;
Jamie Madill4c310832018-08-29 13:43:17 -0400791 VkDeviceSize sourceCopyOffset = 0;
Geoff Langd691aee2018-07-11 16:32:06 -0400792 ANGLE_TRY(mPixelBuffer.allocate(contextVk, sourceCopyAllocationSize, outDataPtr,
793 &copyBufferHandle, &sourceCopyOffset, &newBufferAllocated));
794
795 VkBufferImageCopy region;
Jamie Madill4c310832018-08-29 13:43:17 -0400796 region.bufferOffset = sourceCopyOffset;
Geoff Langd691aee2018-07-11 16:32:06 -0400797 region.bufferRowLength = imageSize.width;
798 region.bufferImageHeight = imageSize.height;
799 region.imageExtent.width = sourceArea.width;
800 region.imageExtent.height = sourceArea.height;
801 region.imageExtent.depth = 1;
802 region.imageOffset.x = sourceArea.x;
803 region.imageOffset.y = sourceArea.y;
804 region.imageOffset.z = 0;
805 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
806 region.imageSubresource.baseArrayLayer = 0;
807 region.imageSubresource.layerCount = 1;
Jamie Madillc6855862018-07-18 15:06:54 -0400808 region.imageSubresource.mipLevel = static_cast<uint32_t>(sourceLevel);
Geoff Langd691aee2018-07-11 16:32:06 -0400809
810 commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
811 1, &region);
812
813 return angle::Result::Continue();
814}
815
// Generates the mip chain on the GPU: each level is produced by blitting the previous level with
// vkCmdBlitImage (linear filter). The caller has already verified the format supports blitting.
angle::Result TextureVk::generateMipmapWithBlit(ContextVk *contextVk)
{
    uint32_t imageLayerCount           = GetImageLayerCount(mState.getType());
    const gl::Extents baseLevelExtents = mImage.getExtents();
    vk::CommandBuffer *commandBuffer   = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(contextVk, &commandBuffer));

    // We are able to use blitImage since the image format we are using supports it. This
    // is a faster way we can generate the mips.
    int32_t mipWidth  = baseLevelExtents.width;
    int32_t mipHeight = baseLevelExtents.height;

    // Manually manage the image memory barrier because it uses a lot more parameters than our
    // usual one. The same barrier struct is reused each iteration with only the mip level,
    // layouts and access masks changing.
    VkImageMemoryBarrier barrier;
    barrier.sType                           = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.image                           = mImage.getImage().getHandle();
    barrier.srcQueueFamilyIndex             = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex             = VK_QUEUE_FAMILY_IGNORED;
    barrier.pNext                           = nullptr;
    barrier.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount     = imageLayerCount;
    barrier.subresourceRange.levelCount     = 1;

    for (uint32_t mipLevel = 1; mipLevel <= mState.getMipmapMaxLevel(); mipLevel++)
    {
        int32_t nextMipWidth  = std::max<int32_t>(1, mipWidth >> 1);
        int32_t nextMipHeight = std::max<int32_t>(1, mipHeight >> 1);

        // Transition level (mipLevel - 1), just written by the previous blit (or upload), to
        // TRANSFER_SRC so it can serve as the source of the next blit.
        barrier.subresourceRange.baseMipLevel = mipLevel - 1;
        barrier.oldLayout                     = mImage.getCurrentLayout();
        barrier.newLayout                     = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        barrier.srcAccessMask                 = VK_ACCESS_TRANSFER_WRITE_BIT;
        barrier.dstAccessMask                 = VK_ACCESS_TRANSFER_READ_BIT;

        // We can do it for all layers at once.
        commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                          VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

        VkImageBlit blit                   = {};
        blit.srcOffsets[0]                 = {0, 0, 0};
        blit.srcOffsets[1]                 = {mipWidth, mipHeight, 1};
        blit.srcSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.srcSubresource.mipLevel       = mipLevel - 1;
        blit.srcSubresource.baseArrayLayer = 0;
        blit.srcSubresource.layerCount     = imageLayerCount;
        blit.dstOffsets[0]                 = {0, 0, 0};
        blit.dstOffsets[1]                 = {nextMipWidth, nextMipHeight, 1};
        blit.dstSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.dstSubresource.mipLevel       = mipLevel;
        blit.dstSubresource.baseArrayLayer = 0;
        blit.dstSubresource.layerCount     = imageLayerCount;

        mipWidth  = nextMipWidth;
        mipHeight = nextMipHeight;

        commandBuffer->blitImage(mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit,
                                 VK_FILTER_LINEAR);
    }

    // Transition the last mip level to the same layout as all the other ones, so we can declare
    // our whole image layout to be SRC_OPTIMAL.
    barrier.subresourceRange.baseMipLevel = mState.getMipmapMaxLevel();
    barrier.oldLayout                     = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.newLayout                     = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

    // We can do it for all layers at once.
    commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                      VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

    // This is just changing the internal state of the image helper so that the next call
    // to changeLayoutWithStages will use this layout as the "oldLayout" argument.
    mImage.updateLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    return angle::Result::Continue();
}
894
// CPU fallback for mip generation (used when the format does not support vkCmdBlitImage):
// reads all base-level layers back to host memory, downsamples on the CPU, and stages the
// generated levels for upload.
angle::Result TextureVk::generateMipmapWithCPU(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    bool newBufferAllocated            = false;
    const gl::Extents baseLevelExtents = mImage.getExtents();
    uint32_t imageLayerCount           = GetImageLayerCount(mState.getType());
    const angle::Format &angleFormat   = mImage.getFormat().textureFormat();
    GLuint sourceRowPitch              = baseLevelExtents.width * angleFormat.pixelBytes;
    size_t baseLevelAllocationSize     = sourceRowPitch * baseLevelExtents.height;

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(contextVk, &commandBuffer));

    // Requirement of the copyImageToBuffer, the source image must be in SRC_OPTIMAL layout.
    mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                  VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                  VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);

    // One tightly-packed base-level image per layer.
    size_t totalAllocationSize = baseLevelAllocationSize * imageLayerCount;

    VkBuffer copyBufferHandle;
    uint8_t *baseLevelBuffers;
    VkDeviceSize copyBaseOffset;

    // Allocate enough memory to copy every level 0 image (one for each layer of the texture).
    ANGLE_TRY(mPixelBuffer.allocate(contextVk, totalAllocationSize, &baseLevelBuffers,
                                    &copyBufferHandle, &copyBaseOffset, &newBufferAllocated));

    // Do only one copy for all layers at once.
    VkBufferImageCopy region;
    region.bufferImageHeight               = baseLevelExtents.height;
    region.bufferOffset                    = copyBaseOffset;
    region.bufferRowLength                 = baseLevelExtents.width;
    region.imageExtent.width               = baseLevelExtents.width;
    region.imageExtent.height              = baseLevelExtents.height;
    region.imageExtent.depth               = 1;
    region.imageOffset.x                   = 0;
    region.imageOffset.y                   = 0;
    region.imageOffset.z                   = 0;
    region.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    region.imageSubresource.baseArrayLayer = 0;
    region.imageSubresource.layerCount     = imageLayerCount;
    region.imageSubresource.mipLevel       = mState.getEffectiveBaseLevel();

    commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
                                     1, &region);

    // Full GPU stall so the readback is visible to the CPU before we downsample.
    ANGLE_TRY(renderer->finish(contextVk));

    const uint32_t levelCount = getLevelCount();

    // We now have the base level available to be manipulated in the baseLevelBuffer pointer.
    // Generate all the missing mipmaps with the slow path. We can optimize with vkCmdBlitImage
    // later.
    // For each layer, use the copied data to generate all the mips.
    for (GLuint layer = 0; layer < imageLayerCount; layer++)
    {
        size_t bufferOffset = layer * baseLevelAllocationSize;

        ANGLE_TRY(generateMipmapLevelsWithCPU(
            contextVk, angleFormat, layer, mState.getEffectiveBaseLevel() + 1,
            mState.getMipmapMaxLevel(), baseLevelExtents.width, baseLevelExtents.height,
            sourceRowPitch, baseLevelBuffers + bufferOffset));
    }

    // NOTE(review): |commandBuffer| was obtained before renderer->finish() above; verify it is
    // still valid to record the flush into it here after the queue has been drained.
    return mPixelBuffer.flushUpdatesToImage(contextVk, levelCount, &mImage, commandBuffer);
}
964
965gl::Error TextureVk::generateMipmap(const gl::Context *context)
966{
967 ContextVk *contextVk = vk::GetImpl(context);
Luc Ferron05cd6df2018-05-24 15:51:29 -0400968
969 // Some data is pending, or the image has not been defined at all yet
970 if (!mImage.valid())
971 {
972 // lets initialize the image so we can generate the next levels.
973 if (!mPixelBuffer.empty())
974 {
Luc Ferronf6e160f2018-06-12 10:13:57 -0400975 ANGLE_TRY(ensureImageInitialized(contextVk));
Luc Ferron05cd6df2018-05-24 15:51:29 -0400976 ASSERT(mImage.valid());
977 }
978 else
979 {
980 // There is nothing to generate if there is nothing uploaded so far.
981 return gl::NoError();
982 }
983 }
984
Luc Ferronf6e160f2018-06-12 10:13:57 -0400985 RendererVk *renderer = contextVk->getRenderer();
Luc Ferron05cd6df2018-05-24 15:51:29 -0400986 VkFormatProperties imageProperties;
987 vk::GetFormatProperties(renderer->getPhysicalDevice(), mImage.getFormat().vkTextureFormat,
988 &imageProperties);
989
990 // Check if the image supports blit. If it does, we can do the mipmap generation on the gpu
991 // only.
992 if (IsMaskFlagSet(kBlitFeatureFlags, imageProperties.linearTilingFeatures))
993 {
Jamie Madill21061022018-07-12 23:56:30 -0400994 ANGLE_TRY(generateMipmapWithBlit(contextVk));
Luc Ferron05cd6df2018-05-24 15:51:29 -0400995 }
996 else
997 {
998 ANGLE_TRY(generateMipmapWithCPU(context));
999 }
1000
1001 // We're changing this textureVk content, make sure we let the graph know.
Jamie Madille2d22702018-09-19 08:11:48 -04001002 finishCurrentCommands(renderer);
Luc Ferron05cd6df2018-05-24 15:51:29 -04001003
Luc Ferronc5181702018-05-17 09:44:42 -04001004 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001005}
1006
// Changing the effective base level is not yet implemented (getLevelCount also assumes base 0).
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
1012
// Pbuffer bind-to-texture is not yet implemented in the Vulkan backend.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
1018
// Counterpart of bindTexImage; also not yet implemented in the Vulkan backend.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
1024
Jamie Madill4928b7c2017-06-20 12:57:39 -04001025gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
1026 GLenum binding,
Jamie Madill4fd95d52017-04-05 11:22:18 -04001027 const gl::ImageIndex &imageIndex,
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001028 FramebufferAttachmentRenderTarget **rtOut)
1029{
Jamie Madill26084d02018-04-09 13:44:04 -04001030 // Non-zero mip level attachments are an ES 3.0 feature.
Jamie Madill71bb0262018-09-12 11:09:42 -04001031 ASSERT(imageIndex.getLevelIndex() == 0);
Jamie Madill26084d02018-04-09 13:44:04 -04001032
1033 ContextVk *contextVk = vk::GetImpl(context);
Luc Ferronf6e160f2018-06-12 10:13:57 -04001034 ANGLE_TRY(ensureImageInitialized(contextVk));
Jamie Madill26084d02018-04-09 13:44:04 -04001035
Jamie Madill71bb0262018-09-12 11:09:42 -04001036 switch (imageIndex.getType())
1037 {
1038 case gl::TextureType::_2D:
1039 *rtOut = &mRenderTarget;
1040 break;
1041 case gl::TextureType::CubeMap:
1042 ANGLE_TRY(initCubeMapRenderTargets(contextVk));
1043 *rtOut = &mCubeMapRenderTargets[imageIndex.cubeMapFaceIndex()];
1044 break;
1045 default:
1046 UNREACHABLE();
1047 }
1048
Jamie Madillb79e7bb2017-10-24 13:55:50 -04001049 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001050}
1051
// Makes sure the backing image exists and all staged pixel-buffer updates have been flushed to
// it. No-op when the image is already valid and no updates are pending.
angle::Result TextureVk::ensureImageInitialized(ContextVk *contextVk)
{
    if (mImage.valid() && mPixelBuffer.empty())
    {
        // Nothing to do: image exists and no staged data remains.
        return angle::Result::Continue();
    }
    RendererVk *renderer             = contextVk->getRenderer();
    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(contextVk, &commandBuffer));

    const gl::ImageDesc &baseLevelDesc  = mState.getBaseLevelDesc();
    const gl::Extents &baseLevelExtents = baseLevelDesc.size;
    const uint32_t levelCount           = getLevelCount();

    if (!mImage.valid())
    {
        // First use: allocate the image from the base level's format and size.
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);

        ANGLE_TRY(initImage(contextVk, format, baseLevelExtents, levelCount, commandBuffer));
    }

    // Upload any staged subresource updates into the (possibly fresh) image.
    return mPixelBuffer.flushUpdatesToImage(contextVk, levelCount, &mImage, commandBuffer);
}
1076
1077angle::Result TextureVk::initCubeMapRenderTargets(ContextVk *contextVk)
1078{
1079 // Lazy init. Check if already initialized.
1080 if (!mCubeMapFaceImageViews.empty())
1081 return angle::Result::Continue();
1082
1083 mCubeMapFaceImageViews.resize(gl::kCubeFaceCount);
1084
1085 for (size_t cubeMapFaceIndex = 0; cubeMapFaceIndex < gl::kCubeFaceCount; ++cubeMapFaceIndex)
1086 {
1087 vk::ImageView &imageView = mCubeMapFaceImageViews[cubeMapFaceIndex];
1088 ANGLE_TRY(mImage.initLayerImageView(contextVk, gl::TextureType::CubeMap,
1089 VK_IMAGE_ASPECT_COLOR_BIT, gl::SwizzleState(),
1090 &imageView, 1, cubeMapFaceIndex, 1));
Jamie Madill3f3b3582018-09-14 10:38:44 -04001091 mCubeMapRenderTargets.emplace_back(&mImage, &imageView, this, cubeMapFaceIndex);
Jamie Madill71bb0262018-09-12 11:09:42 -04001092 }
Jamie Madill21061022018-07-12 23:56:30 -04001093 return angle::Result::Continue();
Jamie Madill26084d02018-04-09 13:44:04 -04001094}
1095
// Rebuilds the VkSampler from the front-end sampler state whenever any texture state is dirty
// (or on first use). The old sampler, if any, is released against its queue serial.
gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    if (dirtyBits.none() && mSampler.valid())
    {
        // Nothing changed and a sampler already exists.
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        // Defer destruction until the GPU is done with the old sampler.
        RendererVk *renderer = contextVk->getRenderer();
        renderer->releaseObject(getStoredQueueSerial(), &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings. Every field of the create info is
    // assigned explicitly below; none are left uninitialized.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext                   = nullptr;
    samplerInfo.flags                   = 0;
    samplerInfo.magFilter               = gl_vk::GetFilter(samplerState.getMagFilter());
    samplerInfo.minFilter               = gl_vk::GetFilter(samplerState.getMinFilter());
    samplerInfo.mipmapMode              = gl_vk::GetSamplerMipmapMode(samplerState.getMinFilter());
    samplerInfo.addressModeU            = gl_vk::GetSamplerAddressMode(samplerState.getWrapS());
    samplerInfo.addressModeV            = gl_vk::GetSamplerAddressMode(samplerState.getWrapT());
    samplerInfo.addressModeW            = gl_vk::GetSamplerAddressMode(samplerState.getWrapR());
    samplerInfo.mipLodBias              = 0.0f;
    samplerInfo.anisotropyEnable        = VK_FALSE;
    samplerInfo.maxAnisotropy           = 1.0f;
    samplerInfo.compareEnable           = VK_FALSE;
    samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod                  = samplerState.getMinLod();
    samplerInfo.maxLod                  = samplerState.getMaxLod();
    samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    return mSampler.init(contextVk, samplerInfo);
}
1135
// Multisample texture storage is not yet implemented in the Vulkan backend.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
1146
// Robust-resource-initialization hook; not implemented yet but deliberately returns success so
// callers proceed (UNIMPLEMENTED logs in debug builds).
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
1153
// Returns the backing image helper; only valid after the image has been initialized.
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
1159
1160const vk::ImageView &TextureVk::getImageView() const
1161{
Jamie Madill93edca12018-03-30 10:43:18 -04001162 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -04001163
Jamie Madill097d3c02018-09-12 11:03:05 -04001164 const GLenum minFilter = mState.getSamplerState().getMinFilter();
Luc Ferron66410532018-04-20 12:47:45 -04001165 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
1166 {
1167 return mBaseLevelImageView;
1168 }
1169
1170 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -04001171}
1172
// Returns the sampler built by syncState; only valid after a successful sync.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
1178
// Allocates the VkImage, binds device-local memory, creates the mip-chain and base-level image
// views, and clears all levels to opaque black.
angle::Result TextureVk::initImage(ContextVk *contextVk,
                                   const vk::Format &format,
                                   const gl::Extents &extents,
                                   const uint32_t levelCount,
                                   vk::CommandBuffer *commandBuffer)
{
    const RendererVk *renderer = contextVk->getRenderer();

    // The image may be sampled, copied in either direction, and rendered to.
    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(contextVk, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(contextVk, renderer->getMemoryProperties(), flags));

    // Fold emulated-format swizzles (e.g. luminance) together with the app's swizzle state.
    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // Renderable textures cannot have a swizzle.
    ASSERT(!contextVk->getTextureCaps().get(format.internalFormat).textureAttachment ||
           !mappedSwizzle.swizzleRequired());

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(contextVk, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(contextVk, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    VkClearColorValue black = {{0, 0, 0, 1.0f}};
    mImage.clearColor(black, 0, levelCount, commandBuffer);
    return angle::Result::Continue();
}
1215
// Releases the image, its views, and the lazily-created cube-face views/render targets. All
// destruction is deferred against queue serials so in-flight GPU work can finish.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mBaseLevelImageView);
    renderer->releaseObject(getStoredQueueSerial(), &mMipmapImageView);

    for (vk::ImageView &imageView : mCubeMapFaceImageViews)
    {
        renderer->releaseObject(getStoredQueueSerial(), &imageView);
    }
    mCubeMapFaceImageViews.clear();
    mCubeMapRenderTargets.clear();
}
1229
// Number of mip levels the image is created with. Assumes base level 0 (setBaseLevel is
// unimplemented above, so this holds for now).
uint32_t TextureVk::getLevelCount() const
{
    ASSERT(mState.getEffectiveBaseLevel() == 0);

    // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
    return mState.getMipmapMaxLevel() + 1;
}
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001237} // namespace rx