blob: 44003fc58b036b4a17468b22022da44297a1b5a8 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Luc Ferronc5181702018-05-17 09:44:42 -040013#include "image_util/generatemip.inl"
Jamie Madill035fd6b2017-10-03 15:43:22 -040014#include "libANGLE/Context.h"
15#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040016#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040017#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050018#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
Luc Ferron5164b792018-03-06 09:10:12 -050022namespace
23{
Jamie Madill93edca12018-03-30 10:43:18 -040024void MapSwizzleState(GLenum internalFormat,
25 const gl::SwizzleState &swizzleState,
26 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050027{
28 switch (internalFormat)
29 {
Jamie Madill26084d02018-04-09 13:44:04 -040030 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040031 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
34 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050035 break;
Jamie Madill26084d02018-04-09 13:44:04 -040036 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040037 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
40 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050041 break;
Jamie Madill26084d02018-04-09 13:44:04 -040042 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040043 swizzleStateOut->swizzleRed = GL_ZERO;
44 swizzleStateOut->swizzleGreen = GL_ZERO;
45 swizzleStateOut->swizzleBlue = GL_ZERO;
46 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040047 break;
Luc Ferron7348fc52018-05-09 07:17:16 -040048 case GL_RGB8:
49 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
50 swizzleStateOut->swizzleGreen = swizzleState.swizzleGreen;
51 swizzleStateOut->swizzleBlue = swizzleState.swizzleBlue;
52 swizzleStateOut->swizzleAlpha = GL_ONE;
53 break;
Luc Ferron5164b792018-03-06 09:10:12 -050054 default:
Jamie Madill93edca12018-03-30 10:43:18 -040055 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050056 break;
57 }
58}
Jamie Madill26084d02018-04-09 13:44:04 -040059
// Staging buffers serve both as copy sources (uploads to images) and copy
// destinations.
constexpr VkBufferUsageFlags kStagingBufferFlags =
    (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
// Initial staging-buffer size (16 KiB); the DynamicBuffer grows on demand.
constexpr size_t kStagingBufferSize = 1024 * 16;

// Format features a format must support for blit-based mipmap generation.
constexpr VkFormatFeatureFlags kBlitFeatureFlags =
    VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
Luc Ferron5164b792018-03-06 09:10:12 -050066} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040067
Jamie Madill26084d02018-04-09 13:44:04 -040068// StagingStorage implementation.
// Sets up the staging buffer that holds pixel data before it is copied into a
// VkImage.
PixelBuffer::PixelBuffer(RendererVk *renderer)
    : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4, renderer);
}
76
// GPU resources are freed via release(), not the destructor.
PixelBuffer::~PixelBuffer()
{
}
80
// Returns the staging buffer's GPU allocations to the renderer for (deferred)
// destruction.
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
85
Jamie Madilla7be1f72018-04-13 15:16:26 -040086gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
87 const gl::ImageIndex &index,
88 const gl::Extents &extents,
Luc Ferron33e05ba2018-04-23 15:12:34 -040089 const gl::Offset &offset,
Jamie Madilla7be1f72018-04-13 15:16:26 -040090 const gl::InternalFormat &formatInfo,
91 const gl::PixelUnpackState &unpack,
92 GLenum type,
93 const uint8_t *pixels)
Jamie Madill26084d02018-04-09 13:44:04 -040094{
95 GLuint inputRowPitch = 0;
96 ANGLE_TRY_RESULT(
97 formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
98 inputRowPitch);
99
100 GLuint inputDepthPitch = 0;
101 ANGLE_TRY_RESULT(
102 formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
103 inputDepthPitch);
104
105 // TODO(jmadill): skip images for 3D Textures.
106 bool applySkipImages = false;
107
108 GLuint inputSkipBytes = 0;
109 ANGLE_TRY_RESULT(
Jeff Gilbert31d3deb2018-05-18 18:32:16 -0700110 formatInfo.computeSkipBytes(type, inputRowPitch, inputDepthPitch, unpack, applySkipImages),
Jamie Madill26084d02018-04-09 13:44:04 -0400111 inputSkipBytes);
112
113 RendererVk *renderer = contextVk->getRenderer();
114
115 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
116 const angle::Format &storageFormat = vkFormat.textureFormat();
117
118 size_t outputRowPitch = storageFormat.pixelBytes * extents.width;
119 size_t outputDepthPitch = outputRowPitch * extents.height;
120
Jamie Madill20fa8d52018-04-15 10:09:32 -0400121 VkBuffer bufferHandle = VK_NULL_HANDLE;
122
Jamie Madill26084d02018-04-09 13:44:04 -0400123 uint8_t *stagingPointer = nullptr;
124 bool newBufferAllocated = false;
125 uint32_t stagingOffset = 0;
126 size_t allocationSize = outputDepthPitch * extents.depth;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400127 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
Jamie Madill26084d02018-04-09 13:44:04 -0400128 &stagingOffset, &newBufferAllocated);
129
130 const uint8_t *source = pixels + inputSkipBytes;
131
132 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);
133
134 loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
135 inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);
136
Jamie Madill20fa8d52018-04-15 10:09:32 -0400137 VkBufferImageCopy copy;
Jamie Madill26084d02018-04-09 13:44:04 -0400138
Jamie Madill20fa8d52018-04-15 10:09:32 -0400139 copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
140 copy.bufferRowLength = extents.width;
141 copy.bufferImageHeight = extents.height;
142 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
143 copy.imageSubresource.mipLevel = index.getLevelIndex();
144 copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
145 copy.imageSubresource.layerCount = index.getLayerCount();
146
Luc Ferron33e05ba2018-04-23 15:12:34 -0400147 gl_vk::GetOffset(offset, &copy.imageOffset);
Jamie Madill20fa8d52018-04-15 10:09:32 -0400148 gl_vk::GetExtent(extents, &copy.imageExtent);
149
150 mSubresourceUpdates.emplace_back(bufferHandle, copy);
Jamie Madill26084d02018-04-09 13:44:04 -0400151
152 return gl::NoError();
153}
154
Jamie Madill58675012018-05-22 14:54:07 -0400155gl::Error PixelBuffer::stageSubresourceUpdateFromFramebuffer(const gl::Context *context,
156 const gl::ImageIndex &index,
157 const gl::Rectangle &sourceArea,
158 const gl::Offset &dstOffset,
159 const gl::Extents &dstExtent,
160 const gl::InternalFormat &formatInfo,
161 FramebufferVk *framebufferVk)
Luc Ferron2a849bf2018-05-10 13:19:11 -0400162{
163 // If the extents and offset is outside the source image, we need to clip.
164 gl::Rectangle clippedRectangle;
Jamie Madill58675012018-05-22 14:54:07 -0400165 const gl::Extents readExtents = framebufferVk->getReadImageExtents();
166 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
Luc Ferron2a849bf2018-05-10 13:19:11 -0400167 &clippedRectangle))
168 {
169 // Empty source area, nothing to do.
170 return gl::NoError();
171 }
172
173 // 1- obtain a buffer handle to copy to
Luc Ferron018709f2018-05-10 13:53:11 -0400174 RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400175
176 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
177 const angle::Format &storageFormat = vkFormat.textureFormat();
Luc Ferron018709f2018-05-10 13:53:11 -0400178 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);
Luc Ferron2a849bf2018-05-10 13:19:11 -0400179
180 size_t outputRowPitch = storageFormat.pixelBytes * clippedRectangle.width;
181 size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;
182
183 VkBuffer bufferHandle = VK_NULL_HANDLE;
184
185 uint8_t *stagingPointer = nullptr;
186 bool newBufferAllocated = false;
187 uint32_t stagingOffset = 0;
Luc Ferron018709f2018-05-10 13:53:11 -0400188
189 // The destination is only one layer deep.
190 size_t allocationSize = outputDepthPitch;
Luc Ferron2a849bf2018-05-10 13:19:11 -0400191 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
192 &stagingOffset, &newBufferAllocated);
193
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400194 PackPixelsParams params;
195 params.area = sourceArea;
196 params.format = formatInfo.internalFormat;
197 params.type = formatInfo.type;
198 params.outputPitch = static_cast<GLuint>(outputRowPitch);
199 params.packBuffer = nullptr;
200 params.pack = gl::PixelPackState();
201
Luc Ferron018709f2018-05-10 13:53:11 -0400202 // 2- copy the source image region to the pixel buffer using a cpu readback
203 if (loadFunction.requiresConversion)
204 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400205 // When a conversion is required, we need to use the loadFunction to read from a temporary
206 // buffer instead so its an even slower path.
207 size_t bufferSize = storageFormat.pixelBytes * sourceArea.width * sourceArea.height;
208 angle::MemoryBuffer *memoryBuffer = nullptr;
209 ANGLE_TRY(context->getScratchBuffer(bufferSize, &memoryBuffer));
210
211 // Read into the scratch buffer
Jamie Madill58675012018-05-22 14:54:07 -0400212 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, memoryBuffer->data()));
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400213
214 // Load from scratch buffer to our pixel buffer
215 loadFunction.loadFunction(sourceArea.width, sourceArea.height, 1, memoryBuffer->data(),
216 outputRowPitch, 0, stagingPointer, outputRowPitch, 0);
Luc Ferron018709f2018-05-10 13:53:11 -0400217 }
218 else
219 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400220 // We read directly from the framebuffer into our pixel buffer.
Jamie Madill58675012018-05-22 14:54:07 -0400221 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, stagingPointer));
Luc Ferron018709f2018-05-10 13:53:11 -0400222 }
Luc Ferron2a849bf2018-05-10 13:19:11 -0400223
Luc Ferron018709f2018-05-10 13:53:11 -0400224 // 3- enqueue the destination image subresource update
Luc Ferron2a849bf2018-05-10 13:19:11 -0400225 VkBufferImageCopy copyToImage;
226 copyToImage.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
Luc Ferron018709f2018-05-10 13:53:11 -0400227 copyToImage.bufferRowLength = 0; // Tightly packed data can be specified as 0.
Luc Ferron2a849bf2018-05-10 13:19:11 -0400228 copyToImage.bufferImageHeight = clippedRectangle.height;
229 copyToImage.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
230 copyToImage.imageSubresource.mipLevel = index.getLevelIndex();
231 copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
232 copyToImage.imageSubresource.layerCount = index.getLayerCount();
233 gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
234 gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);
235
236 // 3- enqueue the destination image subresource update
237 mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
238 return gl::NoError();
239}
240
// Thin wrapper over the staging DynamicBuffer: reserves |sizeInBytes| of mapped
// staging memory and returns the pointer, buffer handle and offset of the region.
gl::Error PixelBuffer::allocate(RendererVk *renderer,
                                size_t sizeInBytes,
                                uint8_t **ptrOut,
                                VkBuffer *handleOut,
                                uint32_t *offsetOut,
                                bool *newBufferAllocatedOut)
{
    return mStagingBuffer.allocate(renderer, sizeInBytes, ptrOut, handleOut, offsetOut,
                                   newBufferAllocatedOut);
}
251
// Records all queued buffer->image copies into |commandBuffer| against |image|,
// then clears the queue and recycles retained staging buffers.
vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
                                           vk::ImageHelper *image,
                                           vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    // Make CPU writes to the staging memory visible before the GPU reads them.
    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);

        // Conservatively flush all writes to the image. We could use a more restricted barrier.
        // Do not move this above the for loop, otherwise multiple updates can have race conditions
        // and not be applied correctly as seen in:
        // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
        image->changeLayoutWithStages(
            VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    mSubresourceUpdates.clear();
    // Hand previously-grown staging buffers back to the renderer for reuse/deferred
    // destruction once the recorded commands complete.
    mStagingBuffer.releaseRetainedBuffers(renderer);

    return vk::NoError();
}
284
// Returns true when no subresource updates are pending.
bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}
289
Luc Ferronc5181702018-05-17 09:44:42 -0400290gl::Error PixelBuffer::stageSubresourceUpdateAndGetData(RendererVk *renderer,
291 size_t allocationSize,
292 const gl::ImageIndex &imageIndex,
293 const gl::Extents &extents,
294 const gl::Offset &offset,
295 uint8_t **destData)
296{
297 VkBuffer bufferHandle;
298 uint32_t stagingOffset = 0;
299 bool newBufferAllocated = false;
300 ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, destData, &bufferHandle,
301 &stagingOffset, &newBufferAllocated));
302
303 VkBufferImageCopy copy;
304 copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
305 copy.bufferRowLength = extents.width;
306 copy.bufferImageHeight = extents.height;
307 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
308 copy.imageSubresource.mipLevel = imageIndex.getLevelIndex();
309 copy.imageSubresource.baseArrayLayer = imageIndex.hasLayer() ? imageIndex.getLayerIndex() : 0;
310 copy.imageSubresource.layerCount = imageIndex.getLayerCount();
311
312 gl_vk::GetOffset(offset, &copy.imageOffset);
313 gl_vk::GetExtent(extents, &copy.imageExtent);
314
315 mSubresourceUpdates.emplace_back(bufferHandle, copy);
316
317 return gl::NoError();
318}
319
// Generates mip levels [firstMipLevel, maxMipLevel] of one layer on the CPU,
// downsampling from |sourceData| level by level. Each generated level is written
// directly into freshly staged buffer memory and queued as a subresource update.
gl::Error TextureVk::generateMipmapLevelsWithCPU(ContextVk *contextVk,
                                                 const angle::Format &sourceFormat,
                                                 GLuint layer,
                                                 GLuint firstMipLevel,
                                                 GLuint maxMipLevel,
                                                 const size_t sourceWidth,
                                                 const size_t sourceHeight,
                                                 const size_t sourceRowPitch,
                                                 uint8_t *sourceData)
{
    RendererVk *renderer = contextVk->getRenderer();

    // Each iteration downsamples from the previous level's buffer.
    size_t previousLevelWidth    = sourceWidth;
    size_t previousLevelHeight   = sourceHeight;
    uint8_t *previousLevelData   = sourceData;
    size_t previousLevelRowPitch = sourceRowPitch;

    for (GLuint currentMipLevel = firstMipLevel; currentMipLevel <= maxMipLevel; currentMipLevel++)
    {
        // Compute next level width and height (halved, clamped to 1).
        size_t mipWidth  = std::max<size_t>(1, previousLevelWidth >> 1);
        size_t mipHeight = std::max<size_t>(1, previousLevelHeight >> 1);

        // With the width and height of the next mip, we can allocate the next buffer we need.
        uint8_t *destData   = nullptr;
        size_t destRowPitch = mipWidth * sourceFormat.pixelBytes;

        size_t mipAllocationSize = destRowPitch * mipHeight;
        gl::Extents mipLevelExtents(static_cast<int>(mipWidth), static_cast<int>(mipHeight), 1);

        // Stage the update; destData points at the staging memory for this level.
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
            renderer, mipAllocationSize,
            gl::ImageIndex::MakeFromType(mState.getType(), currentMipLevel, layer), mipLevelExtents,
            gl::Offset(), &destData));

        // Generate the mipmap into that new buffer
        sourceFormat.mipGenerationFunction(previousLevelWidth, previousLevelHeight, 1,
                                           previousLevelData, previousLevelRowPitch, 0, destData,
                                           destRowPitch, 0);

        // Swap for the next iteration
        previousLevelWidth    = mipWidth;
        previousLevelHeight   = mipHeight;
        previousLevelData     = destData;
        previousLevelRowPitch = destRowPitch;
    }

    return gl::NoError();
}
369
Jamie Madilla7be1f72018-04-13 15:16:26 -0400370PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400371{
372}
373
// Pairs a staging buffer handle with the copy region describing where its
// contents land in the destination image.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400381
Jamie Madill26084d02018-04-09 13:44:04 -0400382// TextureVk implementation.
// The render target aliases this texture's image and base-level view so the
// texture can be used as a framebuffer attachment.
TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
    : TextureImpl(state), mRenderTarget(&mImage, &mBaseLevelImageView, this), mPixelBuffer(renderer)
{
}

// Vulkan resources are released in onDestroy(), not here.
TextureVk::~TextureVk()
{
}
391
// Releases all Vulkan resources owned by this texture. The sampler is released
// against the stored queue serial so in-flight command buffers can finish first.
gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mSampler);

    mPixelBuffer.release(renderer);
    return gl::NoError();
}
403
// Defines (or redefines) one level/layer of the texture. The VkImage itself is
// created lazily; any initial pixel data is staged into mPixelBuffer and flushed
// to the image later.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);

        // Calculate the expected size for the index we are defining. If the size is different from
        // the given size, or the format is different, we are redefining the image so we must
        // release it.
        if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    onResourceChanged(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
450
// Stages a partial update of an already-defined image level. Pixels are copied
// into the staging buffer now and applied to the VkImage when updates flush.
gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk                 = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    onResourceChanged(contextVk->getRenderer());

    return gl::NoError();
}
470
// Compressed texture uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
482
// Compressed sub-image uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
494
Jamie Madillc564c072017-06-01 12:45:42 -0400495gl::Error TextureVk::copyImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400496 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400497 const gl::Rectangle &sourceArea,
498 GLenum internalFormat,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400499 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400500{
Luc Ferronf299a372018-05-14 14:44:54 -0400501 gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
502 const gl::InternalFormat &internalFormatInfo =
503 gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
504 ANGLE_TRY(setImage(context, index, internalFormat, newImageSize, internalFormatInfo.format,
505 internalFormatInfo.type, gl::PixelUnpackState(), nullptr));
506 return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
507 source);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400508}
509
// Copies a framebuffer region into an existing level, keeping the texture's
// current base-level format.
gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
    return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
}
519
520gl::Error TextureVk::copySubImageImpl(const gl::Context *context,
521 const gl::ImageIndex &index,
522 const gl::Offset &destOffset,
523 const gl::Rectangle &sourceArea,
524 const gl::InternalFormat &internalFormat,
525 gl::Framebuffer *source)
526{
Luc Ferron018709f2018-05-10 13:53:11 -0400527 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
528 gl::Rectangle clippedSourceArea;
529 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
530 &clippedSourceArea))
531 {
532 return gl::NoError();
533 }
534
535 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
536 destOffset.y + sourceArea.y - sourceArea.y, 0);
537
538 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill316c6062018-05-29 10:49:45 -0400539 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronf299a372018-05-14 14:44:54 -0400540 FramebufferVk *framebufferVk = vk::GetImpl(source);
Luc Ferron018709f2018-05-10 13:53:11 -0400541
542 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
543 // change to the pixel buffer.
544 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
545 // when its supported.
Jamie Madill58675012018-05-22 14:54:07 -0400546 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
Luc Ferron018709f2018-05-10 13:53:11 -0400547 context, index, clippedSourceArea, modifiedDestOffset,
Luc Ferronf299a372018-05-14 14:44:54 -0400548 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
Jamie Madill58675012018-05-22 14:54:07 -0400549 framebufferVk));
Luc Ferron018709f2018-05-10 13:53:11 -0400550
Jamie Madill316c6062018-05-29 10:49:45 -0400551 onResourceChanged(renderer);
552 framebufferVk->addReadDependency(this);
Luc Ferron018709f2018-05-10 13:53:11 -0400553 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400554}
555
// Obtains a command buffer for recording commands that write to this texture,
// registering the texture as a write resource in the command graph.
vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
                                              vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(appendWriteResource(renderer, commandBufferOut));
    return vk::NoError();
}
562
Jamie Madillc564c072017-06-01 12:45:42 -0400563gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500564 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400565 size_t levels,
566 GLenum internalFormat,
567 const gl::Extents &size)
568{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400569 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
570 RendererVk *renderer = contextVk->getRenderer();
571 const vk::Format &format = renderer->getFormat(internalFormat);
572 vk::CommandBuffer *commandBuffer = nullptr;
573 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
574 ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
575 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400576}
577
// EGLImage-backed textures are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
585
// External (EGLStream) texture images are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
594
// Generates the full mip chain on the GPU with vkCmdBlitImage, level by level:
// each level N-1 is transitioned to TRANSFER_SRC and linearly blitted into level N
// (which is in TRANSFER_DST from image initialization). All array layers are
// processed together. On return the whole image is in TRANSFER_SRC_OPTIMAL.
// Callers are expected to have checked kBlitFeatureFlags support for the format.
void TextureVk::generateMipmapWithBlit(RendererVk *renderer)
{
    uint32_t imageLayerCount           = GetImageLayerCount(mState.getType());
    const gl::Extents baseLevelExtents = mImage.getExtents();
    vk::CommandBuffer *commandBuffer   = nullptr;
    // NOTE(review): the vk::Error result is discarded because this function
    // returns void — consider propagating it to the caller.
    getCommandBufferForWrite(renderer, &commandBuffer);

    // We are able to use blitImage since the image format we are using supports it. This
    // is a faster way we can generate the mips.
    int32_t mipWidth  = baseLevelExtents.width;
    int32_t mipHeight = baseLevelExtents.height;

    // Manually manage the image memory barrier because it uses a lot more parameters than our
    // usual one.
    VkImageMemoryBarrier barrier;
    barrier.sType                           = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.image                           = mImage.getImage().getHandle();
    barrier.srcQueueFamilyIndex             = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex             = VK_QUEUE_FAMILY_IGNORED;
    barrier.pNext                           = nullptr;
    barrier.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount     = imageLayerCount;
    barrier.subresourceRange.levelCount     = 1;

    for (uint32_t mipLevel = 1; mipLevel <= mState.getMipmapMaxLevel(); mipLevel++)
    {
        int32_t nextMipWidth  = std::max<int32_t>(1, mipWidth >> 1);
        int32_t nextMipHeight = std::max<int32_t>(1, mipHeight >> 1);

        // Transition the source level (mipLevel - 1) so its transfer writes are
        // visible and it can be read by the blit.
        barrier.subresourceRange.baseMipLevel = mipLevel - 1;
        barrier.oldLayout                     = mImage.getCurrentLayout();
        barrier.newLayout                     = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
        barrier.srcAccessMask                 = VK_ACCESS_TRANSFER_WRITE_BIT;
        barrier.dstAccessMask                 = VK_ACCESS_TRANSFER_READ_BIT;

        // We can do it for all layers at once.
        commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                          VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

        VkImageBlit blit                   = {};
        blit.srcOffsets[0]                 = {0, 0, 0};
        blit.srcOffsets[1]                 = {mipWidth, mipHeight, 1};
        blit.srcSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.srcSubresource.mipLevel       = mipLevel - 1;
        blit.srcSubresource.baseArrayLayer = 0;
        blit.srcSubresource.layerCount     = imageLayerCount;
        blit.dstOffsets[0]                 = {0, 0, 0};
        blit.dstOffsets[1]                 = {nextMipWidth, nextMipHeight, 1};
        blit.dstSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
        blit.dstSubresource.mipLevel       = mipLevel;
        blit.dstSubresource.baseArrayLayer = 0;
        blit.dstSubresource.layerCount     = imageLayerCount;

        mipWidth  = nextMipWidth;
        mipHeight = nextMipHeight;

        commandBuffer->blitImage(mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 mImage.getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit,
                                 VK_FILTER_LINEAR);
    }

    // Transition the last mip level to the same layout as all the other ones, so we can declare
    // our whole image layout to be SRC_OPTIMAL.
    barrier.subresourceRange.baseMipLevel = mState.getMipmapMaxLevel();
    barrier.oldLayout                     = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.newLayout                     = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

    // We can do it for all layers at once.
    commandBuffer->singleImageBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
                                      VK_PIPELINE_STAGE_TRANSFER_BIT, 0, barrier);

    // This is just changing the internal state of the image helper so that the next call
    // to changeLayoutWithStages will use this layout as the "oldLayout" argument.
    mImage.updateLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
}
671
672gl::Error TextureVk::generateMipmapWithCPU(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400673{
Luc Ferron22695bf2018-05-22 15:52:08 -0400674 ContextVk *contextVk = vk::GetImpl(context);
675 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronc5181702018-05-17 09:44:42 -0400676
Luc Ferronc5181702018-05-17 09:44:42 -0400677 bool newBufferAllocated = false;
Luc Ferronc5181702018-05-17 09:44:42 -0400678 const gl::Extents baseLevelExtents = mImage.getExtents();
Luc Ferron05cd6df2018-05-24 15:51:29 -0400679 uint32_t imageLayerCount = GetImageLayerCount(mState.getType());
680 const angle::Format &angleFormat = mImage.getFormat().textureFormat();
Luc Ferronc5181702018-05-17 09:44:42 -0400681 GLuint sourceRowPitch = baseLevelExtents.width * angleFormat.pixelBytes;
682 size_t baseLevelAllocationSize = sourceRowPitch * baseLevelExtents.height;
683
Luc Ferron22695bf2018-05-22 15:52:08 -0400684 vk::CommandBuffer *commandBuffer = nullptr;
685 getCommandBufferForWrite(renderer, &commandBuffer);
Luc Ferronc5181702018-05-17 09:44:42 -0400686
Luc Ferron22695bf2018-05-22 15:52:08 -0400687 // Requirement of the copyImageToBuffer, the source image must be in SRC_OPTIMAL layout.
688 mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
689 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
690 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);
691
692 size_t totalAllocationSize = baseLevelAllocationSize * imageLayerCount;
693
694 VkBuffer copyBufferHandle;
695 uint8_t *baseLevelBuffers;
696 uint32_t copyBaseOffset;
697
698 // Allocate enough memory to copy every level 0 image (one for each layer of the texture).
699 ANGLE_TRY(mPixelBuffer.allocate(renderer, totalAllocationSize, &baseLevelBuffers,
700 &copyBufferHandle, &copyBaseOffset, &newBufferAllocated));
701
702 // Do only one copy for all layers at once.
Luc Ferronc5181702018-05-17 09:44:42 -0400703 VkBufferImageCopy region;
704 region.bufferImageHeight = baseLevelExtents.height;
Luc Ferron22695bf2018-05-22 15:52:08 -0400705 region.bufferOffset = static_cast<VkDeviceSize>(copyBaseOffset);
Luc Ferronc5181702018-05-17 09:44:42 -0400706 region.bufferRowLength = baseLevelExtents.width;
707 region.imageExtent.width = baseLevelExtents.width;
708 region.imageExtent.height = baseLevelExtents.height;
709 region.imageExtent.depth = 1;
710 region.imageOffset.x = 0;
711 region.imageOffset.y = 0;
712 region.imageOffset.z = 0;
713 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
714 region.imageSubresource.baseArrayLayer = 0;
Luc Ferron22695bf2018-05-22 15:52:08 -0400715 region.imageSubresource.layerCount = imageLayerCount;
Luc Ferronc5181702018-05-17 09:44:42 -0400716 region.imageSubresource.mipLevel = mState.getEffectiveBaseLevel();
717
Luc Ferron22695bf2018-05-22 15:52:08 -0400718 commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
719 1, &region);
Luc Ferronc5181702018-05-17 09:44:42 -0400720
721 ANGLE_TRY(renderer->finish(context));
722
Luc Ferronc5181702018-05-17 09:44:42 -0400723 // We now have the base level available to be manipulated in the baseLevelBuffer pointer.
724 // Generate all the missing mipmaps with the slow path. We can optimize with vkCmdBlitImage
725 // later.
Luc Ferron22695bf2018-05-22 15:52:08 -0400726 // For each layer, use the copied data to generate all the mips.
727 for (GLuint layer = 0; layer < imageLayerCount; layer++)
728 {
729 size_t bufferOffset = layer * baseLevelAllocationSize;
Luc Ferron05cd6df2018-05-24 15:51:29 -0400730
731 ANGLE_TRY(generateMipmapLevelsWithCPU(
Luc Ferron22695bf2018-05-22 15:52:08 -0400732 contextVk, angleFormat, layer, mState.getEffectiveBaseLevel() + 1,
733 mState.getMipmapMaxLevel(), baseLevelExtents.width, baseLevelExtents.height,
734 sourceRowPitch, baseLevelBuffers + bufferOffset));
735 }
Luc Ferronc5181702018-05-17 09:44:42 -0400736
Luc Ferron05cd6df2018-05-24 15:51:29 -0400737 mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer);
738 return gl::NoError();
739}
740
741gl::Error TextureVk::generateMipmap(const gl::Context *context)
742{
743 ContextVk *contextVk = vk::GetImpl(context);
744 RendererVk *renderer = contextVk->getRenderer();
745
746 // Some data is pending, or the image has not been defined at all yet
747 if (!mImage.valid())
748 {
749 // lets initialize the image so we can generate the next levels.
750 if (!mPixelBuffer.empty())
751 {
752 ANGLE_TRY(ensureImageInitialized(renderer));
753 ASSERT(mImage.valid());
754 }
755 else
756 {
757 // There is nothing to generate if there is nothing uploaded so far.
758 return gl::NoError();
759 }
760 }
761
762 VkFormatProperties imageProperties;
763 vk::GetFormatProperties(renderer->getPhysicalDevice(), mImage.getFormat().vkTextureFormat,
764 &imageProperties);
765
766 // Check if the image supports blit. If it does, we can do the mipmap generation on the gpu
767 // only.
768 if (IsMaskFlagSet(kBlitFeatureFlags, imageProperties.linearTilingFeatures))
769 {
770 generateMipmapWithBlit(renderer);
771 }
772 else
773 {
774 ANGLE_TRY(generateMipmapWithCPU(context));
775 }
776
777 // We're changing this textureVk content, make sure we let the graph know.
778 onResourceChanged(renderer);
779
Luc Ferronc5181702018-05-17 09:44:42 -0400780 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400781}
782
// Changing the effective base level is not supported by the Vulkan backend yet.
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
788
// EGL pbuffer binding (eglBindTexImage) is not implemented for Vulkan yet.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
794
// EGL pbuffer release (eglReleaseTexImage) is not implemented for Vulkan yet.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
800
Jamie Madill4928b7c2017-06-20 12:57:39 -0400801gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
802 GLenum binding,
Jamie Madill4fd95d52017-04-05 11:22:18 -0400803 const gl::ImageIndex &imageIndex,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400804 FramebufferAttachmentRenderTarget **rtOut)
805{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400806 // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
Jamie Madillcc129372018-04-12 09:13:18 -0400807 ASSERT(imageIndex.getType() == gl::TextureType::_2D);
Jamie Madill26084d02018-04-09 13:44:04 -0400808
809 // Non-zero mip level attachments are an ES 3.0 feature.
Jamie Madillcc129372018-04-12 09:13:18 -0400810 ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());
Jamie Madill26084d02018-04-09 13:44:04 -0400811
812 ContextVk *contextVk = vk::GetImpl(context);
813 RendererVk *renderer = contextVk->getRenderer();
814
815 ANGLE_TRY(ensureImageInitialized(renderer));
816
Jamie Madillb79e7bb2017-10-24 13:55:50 -0400817 *rtOut = &mRenderTarget;
818 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400819}
820
Jamie Madill26084d02018-04-09 13:44:04 -0400821vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
822{
Luc Ferron10434f62018-04-24 10:06:37 -0400823 if (mImage.valid() && mPixelBuffer.empty())
824 {
825 return vk::NoError();
826 }
827
Jamie Madill26084d02018-04-09 13:44:04 -0400828 vk::CommandBuffer *commandBuffer = nullptr;
Luc Ferronfa7503c2018-05-08 11:25:06 -0400829 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400830
831 if (!mImage.valid())
832 {
833 const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
Jamie Madill26084d02018-04-09 13:44:04 -0400834 const vk::Format &format =
835 renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
Luc Ferronfa7503c2018-05-08 11:25:06 -0400836 const gl::Extents &extents = baseLevelDesc.size;
Luc Ferron66410532018-04-20 12:47:45 -0400837 const uint32_t levelCount = getLevelCount();
Jamie Madill26084d02018-04-09 13:44:04 -0400838
Luc Ferronfa7503c2018-05-08 11:25:06 -0400839 ANGLE_TRY(initImage(renderer, format, extents, levelCount, commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400840 }
841
Jamie Madilla7be1f72018-04-13 15:16:26 -0400842 ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer));
Jamie Madill26084d02018-04-09 13:44:04 -0400843 return vk::NoError();
844}
845
Luc Ferron4bba74f2018-04-19 14:40:45 -0400846gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
Geoff Lang22416862016-06-08 16:14:36 -0700847{
Luc Ferron20610902018-04-19 14:41:13 -0400848 if (dirtyBits.none() && mSampler.valid())
849 {
850 return gl::NoError();
851 }
852
853 ContextVk *contextVk = vk::GetImpl(context);
854 if (mSampler.valid())
855 {
856 RendererVk *renderer = contextVk->getRenderer();
Jamie Madillc57ee252018-05-30 19:53:48 -0400857 renderer->releaseObject(getStoredQueueSerial(), &mSampler);
Luc Ferron20610902018-04-19 14:41:13 -0400858 }
859
860 const gl::SamplerState &samplerState = mState.getSamplerState();
861
862 // Create a simple sampler. Force basic parameter settings.
863 VkSamplerCreateInfo samplerInfo;
864 samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
865 samplerInfo.pNext = nullptr;
866 samplerInfo.flags = 0;
867 samplerInfo.magFilter = gl_vk::GetFilter(samplerState.magFilter);
868 samplerInfo.minFilter = gl_vk::GetFilter(samplerState.minFilter);
Luc Ferron66410532018-04-20 12:47:45 -0400869 samplerInfo.mipmapMode = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
Luc Ferron20610902018-04-19 14:41:13 -0400870 samplerInfo.addressModeU = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
871 samplerInfo.addressModeV = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
872 samplerInfo.addressModeW = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
873 samplerInfo.mipLodBias = 0.0f;
874 samplerInfo.anisotropyEnable = VK_FALSE;
875 samplerInfo.maxAnisotropy = 1.0f;
876 samplerInfo.compareEnable = VK_FALSE;
877 samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
Luc Ferron66410532018-04-20 12:47:45 -0400878 samplerInfo.minLod = samplerState.minLod;
879 samplerInfo.maxLod = samplerState.maxLod;
Luc Ferron20610902018-04-19 14:41:13 -0400880 samplerInfo.borderColor = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
881 samplerInfo.unnormalizedCoordinates = VK_FALSE;
882
883 ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
Luc Ferron4bba74f2018-04-19 14:40:45 -0400884 return gl::NoError();
Geoff Lang22416862016-06-08 16:14:36 -0700885}
886
// Multisample texture storage is not supported by the Vulkan backend yet.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
897
// Robust resource initialization of texture contents is not implemented yet;
// deliberately returns success so callers proceed with uninitialized data.
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
904
// Accessor for the backing image helper; only valid after the image has been
// initialized (see ensureImageInitialized).
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
910
911const vk::ImageView &TextureVk::getImageView() const
912{
Jamie Madill93edca12018-03-30 10:43:18 -0400913 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400914
915 const GLenum minFilter = mState.getSamplerState().minFilter;
916 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
917 {
918 return mBaseLevelImageView;
919 }
920
921 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400922}
923
// Accessor for the VkSampler wrapper; only valid after syncState created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
929
Luc Ferronfa7503c2018-05-08 11:25:06 -0400930vk::Error TextureVk::initImage(RendererVk *renderer,
931 const vk::Format &format,
932 const gl::Extents &extents,
933 const uint32_t levelCount,
934 vk::CommandBuffer *commandBuffer)
935{
936 const VkDevice device = renderer->getDevice();
937
938 const VkImageUsageFlags usage =
939 (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
940 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);
941
942 ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));
943
944 const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
945
946 ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));
947
948 gl::SwizzleState mappedSwizzle;
949 MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);
950
951 // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
952 ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
953 mappedSwizzle, &mMipmapImageView, levelCount));
954 ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
955 mappedSwizzle, &mBaseLevelImageView, 1));
956
957 // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
Luc Ferron7348fc52018-05-09 07:17:16 -0400958 VkClearColorValue black = {{0, 0, 0, 1.0f}};
Luc Ferronc20b9502018-05-24 09:30:17 -0400959 mImage.clearColor(black, 0, levelCount, commandBuffer);
Luc Ferronfa7503c2018-05-08 11:25:06 -0400960 return vk::NoError();
961}
962
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    // Defer destruction of the image and both views until the GPU work that
    // references them (tracked by queue serial) has completed.
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mBaseLevelImageView);
    renderer->releaseObject(getStoredQueueSerial(), &mMipmapImageView);
    // Notify observers (e.g. attached framebuffers) that they must re-sync.
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
970
// Number of mip levels the backing image is created with.
uint32_t TextureVk::getLevelCount() const
{
    // Non-zero base levels are not handled yet (see setBaseLevel).
    ASSERT(mState.getEffectiveBaseLevel() == 0);

    // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
    return mState.getMipmapMaxLevel() + 1;
}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400978} // namespace rx