blob: e83f7021b9e46b1c71f8924036e353306b11e58b [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Luc Ferronc5181702018-05-17 09:44:42 -040013#include "image_util/generatemip.inl"
Jamie Madill035fd6b2017-10-03 15:43:22 -040014#include "libANGLE/Context.h"
15#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040016#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040017#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050018#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040019
20namespace rx
21{
Luc Ferron5164b792018-03-06 09:10:12 -050022namespace
23{
Jamie Madill93edca12018-03-30 10:43:18 -040024void MapSwizzleState(GLenum internalFormat,
25 const gl::SwizzleState &swizzleState,
26 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050027{
28 switch (internalFormat)
29 {
Jamie Madill26084d02018-04-09 13:44:04 -040030 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040031 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
34 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050035 break;
Jamie Madill26084d02018-04-09 13:44:04 -040036 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040037 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
40 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050041 break;
Jamie Madill26084d02018-04-09 13:44:04 -040042 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040043 swizzleStateOut->swizzleRed = GL_ZERO;
44 swizzleStateOut->swizzleGreen = GL_ZERO;
45 swizzleStateOut->swizzleBlue = GL_ZERO;
46 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040047 break;
Luc Ferron7348fc52018-05-09 07:17:16 -040048 case GL_RGB8:
49 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
50 swizzleStateOut->swizzleGreen = swizzleState.swizzleGreen;
51 swizzleStateOut->swizzleBlue = swizzleState.swizzleBlue;
52 swizzleStateOut->swizzleAlpha = GL_ONE;
53 break;
Luc Ferron5164b792018-03-06 09:10:12 -050054 default:
Jamie Madill93edca12018-03-30 10:43:18 -040055 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050056 break;
57 }
58}
Jamie Madill26084d02018-04-09 13:44:04 -040059
60constexpr VkBufferUsageFlags kStagingBufferFlags =
61 (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
62constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron5164b792018-03-06 09:10:12 -050063} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040064
Jamie Madill26084d02018-04-09 13:44:04 -040065// StagingStorage implementation.
Luc Ferrona9ab0f32018-05-17 17:03:55 -040066PixelBuffer::PixelBuffer(RendererVk *renderer)
67 : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
Jamie Madill26084d02018-04-09 13:44:04 -040068{
Jamie Madill20fa8d52018-04-15 10:09:32 -040069 // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
70 // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
Luc Ferrona9ab0f32018-05-17 17:03:55 -040071 mStagingBuffer.init(4, renderer);
Jamie Madill26084d02018-04-09 13:44:04 -040072}
73
Jamie Madilla7be1f72018-04-13 15:16:26 -040074PixelBuffer::~PixelBuffer()
Jamie Madill26084d02018-04-09 13:44:04 -040075{
76}
77
Jamie Madilla7be1f72018-04-13 15:16:26 -040078void PixelBuffer::release(RendererVk *renderer)
Jamie Madill26084d02018-04-09 13:44:04 -040079{
80 mStagingBuffer.release(renderer);
81}
82
Jamie Madilla7be1f72018-04-13 15:16:26 -040083gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
84 const gl::ImageIndex &index,
85 const gl::Extents &extents,
Luc Ferron33e05ba2018-04-23 15:12:34 -040086 const gl::Offset &offset,
Jamie Madilla7be1f72018-04-13 15:16:26 -040087 const gl::InternalFormat &formatInfo,
88 const gl::PixelUnpackState &unpack,
89 GLenum type,
90 const uint8_t *pixels)
Jamie Madill26084d02018-04-09 13:44:04 -040091{
92 GLuint inputRowPitch = 0;
93 ANGLE_TRY_RESULT(
94 formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
95 inputRowPitch);
96
97 GLuint inputDepthPitch = 0;
98 ANGLE_TRY_RESULT(
99 formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
100 inputDepthPitch);
101
102 // TODO(jmadill): skip images for 3D Textures.
103 bool applySkipImages = false;
104
105 GLuint inputSkipBytes = 0;
106 ANGLE_TRY_RESULT(
Jeff Gilbert31d3deb2018-05-18 18:32:16 -0700107 formatInfo.computeSkipBytes(type, inputRowPitch, inputDepthPitch, unpack, applySkipImages),
Jamie Madill26084d02018-04-09 13:44:04 -0400108 inputSkipBytes);
109
110 RendererVk *renderer = contextVk->getRenderer();
111
112 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
113 const angle::Format &storageFormat = vkFormat.textureFormat();
114
115 size_t outputRowPitch = storageFormat.pixelBytes * extents.width;
116 size_t outputDepthPitch = outputRowPitch * extents.height;
117
Jamie Madill20fa8d52018-04-15 10:09:32 -0400118 VkBuffer bufferHandle = VK_NULL_HANDLE;
119
Jamie Madill26084d02018-04-09 13:44:04 -0400120 uint8_t *stagingPointer = nullptr;
121 bool newBufferAllocated = false;
122 uint32_t stagingOffset = 0;
123 size_t allocationSize = outputDepthPitch * extents.depth;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400124 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
Jamie Madill26084d02018-04-09 13:44:04 -0400125 &stagingOffset, &newBufferAllocated);
126
127 const uint8_t *source = pixels + inputSkipBytes;
128
129 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);
130
131 loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
132 inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);
133
Jamie Madill20fa8d52018-04-15 10:09:32 -0400134 VkBufferImageCopy copy;
Jamie Madill26084d02018-04-09 13:44:04 -0400135
Jamie Madill20fa8d52018-04-15 10:09:32 -0400136 copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
137 copy.bufferRowLength = extents.width;
138 copy.bufferImageHeight = extents.height;
139 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
140 copy.imageSubresource.mipLevel = index.getLevelIndex();
141 copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
142 copy.imageSubresource.layerCount = index.getLayerCount();
143
Luc Ferron33e05ba2018-04-23 15:12:34 -0400144 gl_vk::GetOffset(offset, &copy.imageOffset);
Jamie Madill20fa8d52018-04-15 10:09:32 -0400145 gl_vk::GetExtent(extents, &copy.imageExtent);
146
147 mSubresourceUpdates.emplace_back(bufferHandle, copy);
Jamie Madill26084d02018-04-09 13:44:04 -0400148
149 return gl::NoError();
150}
151
Jamie Madill58675012018-05-22 14:54:07 -0400152gl::Error PixelBuffer::stageSubresourceUpdateFromFramebuffer(const gl::Context *context,
153 const gl::ImageIndex &index,
154 const gl::Rectangle &sourceArea,
155 const gl::Offset &dstOffset,
156 const gl::Extents &dstExtent,
157 const gl::InternalFormat &formatInfo,
158 FramebufferVk *framebufferVk)
Luc Ferron2a849bf2018-05-10 13:19:11 -0400159{
160 // If the extents and offset is outside the source image, we need to clip.
161 gl::Rectangle clippedRectangle;
Jamie Madill58675012018-05-22 14:54:07 -0400162 const gl::Extents readExtents = framebufferVk->getReadImageExtents();
163 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, readExtents.width, readExtents.height),
Luc Ferron2a849bf2018-05-10 13:19:11 -0400164 &clippedRectangle))
165 {
166 // Empty source area, nothing to do.
167 return gl::NoError();
168 }
169
170 // 1- obtain a buffer handle to copy to
Luc Ferron018709f2018-05-10 13:53:11 -0400171 RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400172
173 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
174 const angle::Format &storageFormat = vkFormat.textureFormat();
Luc Ferron018709f2018-05-10 13:53:11 -0400175 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);
Luc Ferron2a849bf2018-05-10 13:19:11 -0400176
177 size_t outputRowPitch = storageFormat.pixelBytes * clippedRectangle.width;
178 size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;
179
180 VkBuffer bufferHandle = VK_NULL_HANDLE;
181
182 uint8_t *stagingPointer = nullptr;
183 bool newBufferAllocated = false;
184 uint32_t stagingOffset = 0;
Luc Ferron018709f2018-05-10 13:53:11 -0400185
186 // The destination is only one layer deep.
187 size_t allocationSize = outputDepthPitch;
Luc Ferron2a849bf2018-05-10 13:19:11 -0400188 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
189 &stagingOffset, &newBufferAllocated);
190
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400191 PackPixelsParams params;
192 params.area = sourceArea;
193 params.format = formatInfo.internalFormat;
194 params.type = formatInfo.type;
195 params.outputPitch = static_cast<GLuint>(outputRowPitch);
196 params.packBuffer = nullptr;
197 params.pack = gl::PixelPackState();
198
Luc Ferron018709f2018-05-10 13:53:11 -0400199 // 2- copy the source image region to the pixel buffer using a cpu readback
200 if (loadFunction.requiresConversion)
201 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400202 // When a conversion is required, we need to use the loadFunction to read from a temporary
203 // buffer instead so its an even slower path.
204 size_t bufferSize = storageFormat.pixelBytes * sourceArea.width * sourceArea.height;
205 angle::MemoryBuffer *memoryBuffer = nullptr;
206 ANGLE_TRY(context->getScratchBuffer(bufferSize, &memoryBuffer));
207
208 // Read into the scratch buffer
Jamie Madill58675012018-05-22 14:54:07 -0400209 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, memoryBuffer->data()));
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400210
211 // Load from scratch buffer to our pixel buffer
212 loadFunction.loadFunction(sourceArea.width, sourceArea.height, 1, memoryBuffer->data(),
213 outputRowPitch, 0, stagingPointer, outputRowPitch, 0);
Luc Ferron018709f2018-05-10 13:53:11 -0400214 }
215 else
216 {
Luc Ferrondaf7ace2018-05-14 13:44:15 -0400217 // We read directly from the framebuffer into our pixel buffer.
Jamie Madill58675012018-05-22 14:54:07 -0400218 ANGLE_TRY(framebufferVk->readPixelsImpl(context, sourceArea, params, stagingPointer));
Luc Ferron018709f2018-05-10 13:53:11 -0400219 }
Luc Ferron2a849bf2018-05-10 13:19:11 -0400220
Luc Ferron018709f2018-05-10 13:53:11 -0400221 // 3- enqueue the destination image subresource update
Luc Ferron2a849bf2018-05-10 13:19:11 -0400222 VkBufferImageCopy copyToImage;
223 copyToImage.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
Luc Ferron018709f2018-05-10 13:53:11 -0400224 copyToImage.bufferRowLength = 0; // Tightly packed data can be specified as 0.
Luc Ferron2a849bf2018-05-10 13:19:11 -0400225 copyToImage.bufferImageHeight = clippedRectangle.height;
226 copyToImage.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
227 copyToImage.imageSubresource.mipLevel = index.getLevelIndex();
228 copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
229 copyToImage.imageSubresource.layerCount = index.getLayerCount();
230 gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
231 gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);
232
233 // 3- enqueue the destination image subresource update
234 mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
235 return gl::NoError();
236}
237
Luc Ferronc5181702018-05-17 09:44:42 -0400238gl::Error PixelBuffer::allocate(RendererVk *renderer,
239 size_t sizeInBytes,
240 uint8_t **ptrOut,
241 VkBuffer *handleOut,
242 uint32_t *offsetOut,
243 bool *newBufferAllocatedOut)
244{
245 return mStagingBuffer.allocate(renderer, sizeInBytes, ptrOut, handleOut, offsetOut,
246 newBufferAllocatedOut);
247}
248
Jamie Madilla7be1f72018-04-13 15:16:26 -0400249vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
250 vk::ImageHelper *image,
251 vk::CommandBuffer *commandBuffer)
Jamie Madill26084d02018-04-09 13:44:04 -0400252{
Jamie Madill20fa8d52018-04-15 10:09:32 -0400253 if (mSubresourceUpdates.empty())
Jamie Madill26084d02018-04-09 13:44:04 -0400254 {
Jamie Madill20fa8d52018-04-15 10:09:32 -0400255 return vk::NoError();
Jamie Madill26084d02018-04-09 13:44:04 -0400256 }
257
Jamie Madill20fa8d52018-04-15 10:09:32 -0400258 ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));
259
260 for (const SubresourceUpdate &update : mSubresourceUpdates)
261 {
262 ASSERT(update.bufferHandle != VK_NULL_HANDLE);
Luc Ferron1a186b12018-04-24 15:25:35 -0400263
264 // Conservatively flush all writes to the image. We could use a more restricted barrier.
265 // Do not move this above the for loop, otherwise multiple updates can have race conditions
266 // and not be applied correctly as seen i:
267 // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
268 image->changeLayoutWithStages(
269 VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
270 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);
271
Jamie Madill20fa8d52018-04-15 10:09:32 -0400272 commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
273 image->getCurrentLayout(), 1, &update.copyRegion);
274 }
275
276 mSubresourceUpdates.clear();
Luc Ferron61859812018-05-09 14:17:39 -0400277 mStagingBuffer.releaseRetainedBuffers(renderer);
Jamie Madill20fa8d52018-04-15 10:09:32 -0400278
Jamie Madill26084d02018-04-09 13:44:04 -0400279 return vk::NoError();
280}
281
Luc Ferron10434f62018-04-24 10:06:37 -0400282bool PixelBuffer::empty() const
283{
284 return mSubresourceUpdates.empty();
285}
286
Luc Ferronc5181702018-05-17 09:44:42 -0400287gl::Error PixelBuffer::stageSubresourceUpdateAndGetData(RendererVk *renderer,
288 size_t allocationSize,
289 const gl::ImageIndex &imageIndex,
290 const gl::Extents &extents,
291 const gl::Offset &offset,
292 uint8_t **destData)
293{
294 VkBuffer bufferHandle;
295 uint32_t stagingOffset = 0;
296 bool newBufferAllocated = false;
297 ANGLE_TRY(mStagingBuffer.allocate(renderer, allocationSize, destData, &bufferHandle,
298 &stagingOffset, &newBufferAllocated));
299
300 VkBufferImageCopy copy;
301 copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
302 copy.bufferRowLength = extents.width;
303 copy.bufferImageHeight = extents.height;
304 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
305 copy.imageSubresource.mipLevel = imageIndex.getLevelIndex();
306 copy.imageSubresource.baseArrayLayer = imageIndex.hasLayer() ? imageIndex.getLayerIndex() : 0;
307 copy.imageSubresource.layerCount = imageIndex.getLayerCount();
308
309 gl_vk::GetOffset(offset, &copy.imageOffset);
310 gl_vk::GetExtent(extents, &copy.imageExtent);
311
312 mSubresourceUpdates.emplace_back(bufferHandle, copy);
313
314 return gl::NoError();
315}
316
317gl::Error TextureVk::generateMipmapLevels(ContextVk *contextVk,
318 const angle::Format &sourceFormat,
Luc Ferron22695bf2018-05-22 15:52:08 -0400319 GLuint layer,
Luc Ferronc5181702018-05-17 09:44:42 -0400320 GLuint firstMipLevel,
321 GLuint maxMipLevel,
322 const size_t sourceWidth,
323 const size_t sourceHeight,
324 const size_t sourceRowPitch,
325 uint8_t *sourceData)
326{
327 RendererVk *renderer = contextVk->getRenderer();
328
329 size_t previousLevelWidth = sourceWidth;
330 size_t previousLevelHeight = sourceHeight;
331 uint8_t *previousLevelData = sourceData;
332 size_t previousLevelRowPitch = sourceRowPitch;
333
334 for (GLuint currentMipLevel = firstMipLevel; currentMipLevel <= maxMipLevel; currentMipLevel++)
335 {
336 // Compute next level width and height.
337 size_t mipWidth = std::max<size_t>(1, previousLevelWidth >> 1);
338 size_t mipHeight = std::max<size_t>(1, previousLevelHeight >> 1);
339
340 // With the width and height of the next mip, we can allocate the next buffer we need.
341 uint8_t *destData = nullptr;
342 size_t destRowPitch = mipWidth * sourceFormat.pixelBytes;
343
344 size_t mipAllocationSize = destRowPitch * mipHeight;
345 gl::Extents mipLevelExtents(static_cast<int>(mipWidth), static_cast<int>(mipHeight), 1);
346
347 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateAndGetData(
348 renderer, mipAllocationSize,
Luc Ferron22695bf2018-05-22 15:52:08 -0400349 gl::ImageIndex::MakeFromType(mState.getType(), currentMipLevel, layer), mipLevelExtents,
Luc Ferronc5181702018-05-17 09:44:42 -0400350 gl::Offset(), &destData));
351
352 // Generate the mipmap into that new buffer
353 sourceFormat.mipGenerationFunction(previousLevelWidth, previousLevelHeight, 1,
354 previousLevelData, previousLevelRowPitch, 0, destData,
355 destRowPitch, 0);
356
357 // Swap for the next iteration
358 previousLevelWidth = mipWidth;
359 previousLevelHeight = mipHeight;
360 previousLevelData = destData;
361 previousLevelRowPitch = destRowPitch;
362 }
363
364 return gl::NoError();
365}
366
Jamie Madilla7be1f72018-04-13 15:16:26 -0400367PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400368{
369}
370
Jamie Madilla7be1f72018-04-13 15:16:26 -0400371PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
372 const VkBufferImageCopy &copyRegionIn)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400373 : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
374{
375}
376
Jamie Madilla7be1f72018-04-13 15:16:26 -0400377PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400378
Jamie Madill26084d02018-04-09 13:44:04 -0400379// TextureVk implementation.
Luc Ferrona9ab0f32018-05-17 17:03:55 -0400380TextureVk::TextureVk(const gl::TextureState &state, RendererVk *renderer)
Jamie Madillbcf467f2018-05-23 09:46:00 -0400381 : TextureImpl(state), mRenderTarget(&mImage, &mBaseLevelImageView, this), mPixelBuffer(renderer)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400382{
383}
384
385TextureVk::~TextureVk()
386{
387}
388
Jamie Madill035fd6b2017-10-03 15:43:22 -0400389gl::Error TextureVk::onDestroy(const gl::Context *context)
390{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400391 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400392 RendererVk *renderer = contextVk->getRenderer();
393
Jamie Madillc4f27e42018-03-31 14:19:18 -0400394 releaseImage(context, renderer);
Jamie Madillc57ee252018-05-30 19:53:48 -0400395 renderer->releaseObject(getStoredQueueSerial(), &mSampler);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400396
Jamie Madilla7be1f72018-04-13 15:16:26 -0400397 mPixelBuffer.release(renderer);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400398 return gl::NoError();
399}
400
Jamie Madillc564c072017-06-01 12:45:42 -0400401gl::Error TextureVk::setImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400402 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400403 GLenum internalFormat,
404 const gl::Extents &size,
405 GLenum format,
406 GLenum type,
407 const gl::PixelUnpackState &unpack,
408 const uint8_t *pixels)
409{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400410 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill1b038242017-11-01 15:14:36 -0400411 RendererVk *renderer = contextVk->getRenderer();
412
Jamie Madillc4f27e42018-03-31 14:19:18 -0400413 // Convert internalFormat to sized internal format.
414 const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400415
Jamie Madill1b038242017-11-01 15:14:36 -0400416 if (mImage.valid())
417 {
Jamie Madillc4f27e42018-03-31 14:19:18 -0400418 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
Luc Ferron90968362018-05-04 08:47:22 -0400419
420 // Calculate the expected size for the index we are defining. If the size is different from
421 // the given size, or the format is different, we are redefining the image so we must
422 // release it.
423 if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
Jamie Madill1b038242017-11-01 15:14:36 -0400424 {
Jamie Madillc4f27e42018-03-31 14:19:18 -0400425 releaseImage(context, renderer);
Jamie Madill1b038242017-11-01 15:14:36 -0400426 }
427 }
Jamie Madill035fd6b2017-10-03 15:43:22 -0400428
Geoff Langbd6ae4a2018-01-29 15:51:18 -0500429 // Early-out on empty textures, don't create a zero-sized storage.
Jamie Madill26084d02018-04-09 13:44:04 -0400430 if (size.empty())
Geoff Langbd6ae4a2018-01-29 15:51:18 -0500431 {
432 return gl::NoError();
433 }
434
Jamie Madill26084d02018-04-09 13:44:04 -0400435 // Create a new graph node to store image initialization commands.
Jamie Madill316c6062018-05-29 10:49:45 -0400436 onResourceChanged(renderer);
Jamie Madill26084d02018-04-09 13:44:04 -0400437
Jamie Madill035fd6b2017-10-03 15:43:22 -0400438 // Handle initial data.
Jamie Madill035fd6b2017-10-03 15:43:22 -0400439 if (pixels)
440 {
Luc Ferron33e05ba2018-04-23 15:12:34 -0400441 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
442 formatInfo, unpack, type, pixels));
Jamie Madill035fd6b2017-10-03 15:43:22 -0400443 }
444
445 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400446}
447
Jamie Madillc564c072017-06-01 12:45:42 -0400448gl::Error TextureVk::setSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400449 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400450 const gl::Box &area,
451 GLenum format,
452 GLenum type,
453 const gl::PixelUnpackState &unpack,
454 const uint8_t *pixels)
455{
Jamie Madill5b18f482017-11-30 17:24:22 -0500456 ContextVk *contextVk = vk::GetImpl(context);
457 const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
Luc Ferron33e05ba2018-04-23 15:12:34 -0400458 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
459 contextVk, index, gl::Extents(area.width, area.height, area.depth),
460 gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));
Jamie Madillb2214862018-04-26 07:25:48 -0400461
462 // Create a new graph node to store image initialization commands.
Jamie Madill316c6062018-05-29 10:49:45 -0400463 onResourceChanged(contextVk->getRenderer());
Jamie Madillb2214862018-04-26 07:25:48 -0400464
Jamie Madill5b18f482017-11-30 17:24:22 -0500465 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400466}
467
Jamie Madillc564c072017-06-01 12:45:42 -0400468gl::Error TextureVk::setCompressedImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400469 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400470 GLenum internalFormat,
471 const gl::Extents &size,
472 const gl::PixelUnpackState &unpack,
473 size_t imageSize,
474 const uint8_t *pixels)
475{
476 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500477 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400478}
479
Jamie Madillc564c072017-06-01 12:45:42 -0400480gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400481 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400482 const gl::Box &area,
483 GLenum format,
484 const gl::PixelUnpackState &unpack,
485 size_t imageSize,
486 const uint8_t *pixels)
487{
488 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500489 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400490}
491
Jamie Madillc564c072017-06-01 12:45:42 -0400492gl::Error TextureVk::copyImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400493 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400494 const gl::Rectangle &sourceArea,
495 GLenum internalFormat,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400496 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400497{
Luc Ferronf299a372018-05-14 14:44:54 -0400498 gl::Extents newImageSize(sourceArea.width, sourceArea.height, 1);
499 const gl::InternalFormat &internalFormatInfo =
500 gl::GetInternalFormatInfo(internalFormat, GL_UNSIGNED_BYTE);
501 ANGLE_TRY(setImage(context, index, internalFormat, newImageSize, internalFormatInfo.format,
502 internalFormatInfo.type, gl::PixelUnpackState(), nullptr));
503 return copySubImageImpl(context, index, gl::Offset(0, 0, 0), sourceArea, internalFormatInfo,
504 source);
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400505}
506
Jamie Madillc564c072017-06-01 12:45:42 -0400507gl::Error TextureVk::copySubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400508 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400509 const gl::Offset &destOffset,
510 const gl::Rectangle &sourceArea,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400511 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400512{
Luc Ferronf299a372018-05-14 14:44:54 -0400513 const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
514 return copySubImageImpl(context, index, destOffset, sourceArea, currentFormat, source);
515}
516
517gl::Error TextureVk::copySubImageImpl(const gl::Context *context,
518 const gl::ImageIndex &index,
519 const gl::Offset &destOffset,
520 const gl::Rectangle &sourceArea,
521 const gl::InternalFormat &internalFormat,
522 gl::Framebuffer *source)
523{
Luc Ferron018709f2018-05-10 13:53:11 -0400524 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
525 gl::Rectangle clippedSourceArea;
526 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
527 &clippedSourceArea))
528 {
529 return gl::NoError();
530 }
531
532 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
533 destOffset.y + sourceArea.y - sourceArea.y, 0);
534
535 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill316c6062018-05-29 10:49:45 -0400536 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronf299a372018-05-14 14:44:54 -0400537 FramebufferVk *framebufferVk = vk::GetImpl(source);
Luc Ferron018709f2018-05-10 13:53:11 -0400538
539 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
540 // change to the pixel buffer.
541 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
542 // when its supported.
Jamie Madill58675012018-05-22 14:54:07 -0400543 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromFramebuffer(
Luc Ferron018709f2018-05-10 13:53:11 -0400544 context, index, clippedSourceArea, modifiedDestOffset,
Luc Ferronf299a372018-05-14 14:44:54 -0400545 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), internalFormat,
Jamie Madill58675012018-05-22 14:54:07 -0400546 framebufferVk));
Luc Ferron018709f2018-05-10 13:53:11 -0400547
Jamie Madill316c6062018-05-29 10:49:45 -0400548 onResourceChanged(renderer);
549 framebufferVk->addReadDependency(this);
Luc Ferron018709f2018-05-10 13:53:11 -0400550 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400551}
552
Luc Ferronfa7503c2018-05-08 11:25:06 -0400553vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
Jamie Madill316c6062018-05-29 10:49:45 -0400554 vk::CommandBuffer **commandBufferOut)
Luc Ferronfa7503c2018-05-08 11:25:06 -0400555{
Luc Ferronc5181702018-05-17 09:44:42 -0400556 ANGLE_TRY(appendWriteResource(renderer, commandBufferOut));
Luc Ferronfa7503c2018-05-08 11:25:06 -0400557 return vk::NoError();
558}
559
Jamie Madillc564c072017-06-01 12:45:42 -0400560gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500561 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400562 size_t levels,
563 GLenum internalFormat,
564 const gl::Extents &size)
565{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400566 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
567 RendererVk *renderer = contextVk->getRenderer();
568 const vk::Format &format = renderer->getFormat(internalFormat);
569 vk::CommandBuffer *commandBuffer = nullptr;
570 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
571 ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
572 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400573}
574
Corentin Wallez99d492c2018-02-27 15:17:10 -0500575gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
576 gl::TextureType type,
577 egl::Image *image)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400578{
579 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500580 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400581}
582
Jamie Madill4928b7c2017-06-20 12:57:39 -0400583gl::Error TextureVk::setImageExternal(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500584 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400585 egl::Stream *stream,
586 const egl::Stream::GLTextureDescription &desc)
587{
588 UNIMPLEMENTED();
Yuly Novikovc4d18aa2017-03-09 18:45:02 -0500589 return gl::InternalError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400590}
591
Jamie Madillc564c072017-06-01 12:45:42 -0400592gl::Error TextureVk::generateMipmap(const gl::Context *context)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400593{
Luc Ferron22695bf2018-05-22 15:52:08 -0400594 ContextVk *contextVk = vk::GetImpl(context);
595 RendererVk *renderer = contextVk->getRenderer();
Luc Ferronc5181702018-05-17 09:44:42 -0400596
597 // Some data is pending, or the image has not been defined at all yet
598 if (!mImage.valid())
599 {
600 // lets initialize the image so we can generate the next levels.
601 if (!mPixelBuffer.empty())
602 {
603 ANGLE_TRY(ensureImageInitialized(renderer));
604 ASSERT(mImage.valid());
605 }
606 else
607 {
608 // There is nothing to generate if there is nothing uploaded so far.
609 return gl::NoError();
610 }
611 }
612
613 // Before we loop to generate all the next levels, we can get the source level and copy it to a
614 // buffer.
Luc Ferron22695bf2018-05-22 15:52:08 -0400615 const angle::Format &angleFormat = mImage.getFormat().textureFormat();
616 uint32_t imageLayerCount = GetImageLayerCount(mState.getType());
617
Luc Ferronc5181702018-05-17 09:44:42 -0400618 bool newBufferAllocated = false;
Luc Ferronc5181702018-05-17 09:44:42 -0400619 const gl::Extents baseLevelExtents = mImage.getExtents();
620 GLuint sourceRowPitch = baseLevelExtents.width * angleFormat.pixelBytes;
621 size_t baseLevelAllocationSize = sourceRowPitch * baseLevelExtents.height;
622
Luc Ferron22695bf2018-05-22 15:52:08 -0400623 vk::CommandBuffer *commandBuffer = nullptr;
624 getCommandBufferForWrite(renderer, &commandBuffer);
Luc Ferronc5181702018-05-17 09:44:42 -0400625
Luc Ferron22695bf2018-05-22 15:52:08 -0400626 // Requirement of the copyImageToBuffer, the source image must be in SRC_OPTIMAL layout.
627 mImage.changeLayoutWithStages(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
628 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
629 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);
630
631 size_t totalAllocationSize = baseLevelAllocationSize * imageLayerCount;
632
633 VkBuffer copyBufferHandle;
634 uint8_t *baseLevelBuffers;
635 uint32_t copyBaseOffset;
636
637 // Allocate enough memory to copy every level 0 image (one for each layer of the texture).
638 ANGLE_TRY(mPixelBuffer.allocate(renderer, totalAllocationSize, &baseLevelBuffers,
639 &copyBufferHandle, &copyBaseOffset, &newBufferAllocated));
640
641 // Do only one copy for all layers at once.
Luc Ferronc5181702018-05-17 09:44:42 -0400642 VkBufferImageCopy region;
643 region.bufferImageHeight = baseLevelExtents.height;
Luc Ferron22695bf2018-05-22 15:52:08 -0400644 region.bufferOffset = static_cast<VkDeviceSize>(copyBaseOffset);
Luc Ferronc5181702018-05-17 09:44:42 -0400645 region.bufferRowLength = baseLevelExtents.width;
646 region.imageExtent.width = baseLevelExtents.width;
647 region.imageExtent.height = baseLevelExtents.height;
648 region.imageExtent.depth = 1;
649 region.imageOffset.x = 0;
650 region.imageOffset.y = 0;
651 region.imageOffset.z = 0;
652 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
653 region.imageSubresource.baseArrayLayer = 0;
Luc Ferron22695bf2018-05-22 15:52:08 -0400654 region.imageSubresource.layerCount = imageLayerCount;
Luc Ferronc5181702018-05-17 09:44:42 -0400655 region.imageSubresource.mipLevel = mState.getEffectiveBaseLevel();
656
Luc Ferron22695bf2018-05-22 15:52:08 -0400657 commandBuffer->copyImageToBuffer(mImage.getImage(), mImage.getCurrentLayout(), copyBufferHandle,
658 1, &region);
Luc Ferronc5181702018-05-17 09:44:42 -0400659
660 ANGLE_TRY(renderer->finish(context));
661
662 // We're changing this textureVk content, make sure we let the graph know.
663 onResourceChanged(renderer);
664
665 // We now have the base level available to be manipulated in the baseLevelBuffer pointer.
666 // Generate all the missing mipmaps with the slow path. We can optimize with vkCmdBlitImage
667 // later.
Luc Ferron22695bf2018-05-22 15:52:08 -0400668 // For each layer, use the copied data to generate all the mips.
669 for (GLuint layer = 0; layer < imageLayerCount; layer++)
670 {
671 size_t bufferOffset = layer * baseLevelAllocationSize;
672 ANGLE_TRY(generateMipmapLevels(
673 contextVk, angleFormat, layer, mState.getEffectiveBaseLevel() + 1,
674 mState.getMipmapMaxLevel(), baseLevelExtents.width, baseLevelExtents.height,
675 sourceRowPitch, baseLevelBuffers + bufferOffset));
676 }
Luc Ferronc5181702018-05-17 09:44:42 -0400677
678 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400679}
680
// Changing the effective base level is not supported by the Vulkan backend yet;
// callers hitting this path get an internal error.
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
686
// EGL pbuffer-to-texture binding (eglBindTexImage) is not implemented for Vulkan yet.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
692
// Counterpart of bindTexImage (eglReleaseTexImage); also unimplemented for Vulkan.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
698
// Returns the render target used when this texture is attached to a framebuffer.
// The image (and any staged pixel updates) is flushed first so the attachment
// observes up-to-date contents.
gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Create the image if necessary and flush staged data before handing out the target.
    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}
718
// Lazily creates the backing VkImage (sized from the base level description) and
// flushes any pixel data staged in mPixelBuffer into it. No-op when the image
// already exists and no updates are pending.
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    // Fast path: image exists and there is nothing staged to upload.
    if (mImage.valid() && mPixelBuffer.empty())
    {
        return vk::NoError();
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    if (!mImage.valid())
    {
        // Size/format of the image come from the base mip level of the GL state.
        const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
        const gl::Extents &extents = baseLevelDesc.size;
        const uint32_t levelCount = getLevelCount();

        ANGLE_TRY(initImage(renderer, format, extents, levelCount, commandBuffer));
    }

    // Record copies for any pixel data staged before the image existed.
    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer));
    return vk::NoError();
}
743
// Re-creates the VkSampler whenever texture/sampler state is dirty (or no
// sampler exists yet). The old sampler is queued for deferred deletion so
// in-flight command buffers that reference it stay valid.
gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    // Nothing changed and we already have a sampler: nothing to do.
    if (dirtyBits.none() && mSampler.valid())
    {
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        RendererVk *renderer = contextVk->getRenderer();
        // Defer destruction until the GPU is done with the stored serial.
        renderer->releaseObject(getStoredQueueSerial(), &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext                   = nullptr;
    samplerInfo.flags                   = 0;
    samplerInfo.magFilter               = gl_vk::GetFilter(samplerState.magFilter);
    samplerInfo.minFilter               = gl_vk::GetFilter(samplerState.minFilter);
    samplerInfo.mipmapMode              = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
    samplerInfo.addressModeU            = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
    samplerInfo.addressModeV            = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
    samplerInfo.addressModeW            = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
    samplerInfo.mipLodBias              = 0.0f;
    // Anisotropic filtering and shadow-compare are not wired up yet.
    samplerInfo.anisotropyEnable        = VK_FALSE;
    samplerInfo.maxAnisotropy           = 1.0f;
    samplerInfo.compareEnable           = VK_FALSE;
    samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod                  = samplerState.minLod;
    samplerInfo.maxLod                  = samplerState.maxLod;
    samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
    return gl::NoError();
}
784
// Multisample texture storage (ES 3.1 glTexStorage2DMultisample) is not
// implemented in the Vulkan backend yet.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
795
// Robust resource initialization of texture contents is not implemented yet;
// intentionally returns success so callers proceed without initialization.
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
802
// Accessor for the backing image helper. Only valid after the image has been
// initialized (see ensureImageInitialized).
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
808
809const vk::ImageView &TextureVk::getImageView() const
810{
Jamie Madill93edca12018-03-30 10:43:18 -0400811 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400812
813 const GLenum minFilter = mState.getSamplerState().minFilter;
814 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
815 {
816 return mBaseLevelImageView;
817 }
818
819 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400820}
821
// Accessor for the VkSampler wrapper. Only valid after syncState has created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
827
// Creates the VkImage with device-local memory plus two color views (full mip
// chain and base-level-only), applying the luminance/alpha swizzle mapping.
// The image is cleared to opaque black via the provided command buffer.
vk::Error TextureVk::initImage(RendererVk *renderer,
                               const vk::Format &format,
                               const gl::Extents &extents,
                               const uint32_t levelCount,
                               vk::CommandBuffer *commandBuffer)
{
    const VkDevice device = renderer->getDevice();

    // Usage covers sampling, render-to-texture, and transfer in both directions
    // (uploads and the mipmap-generation readback path).
    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

    // Map GL swizzle state through the format (e.g. LUMINANCE emulation).
    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    // NOTE(review): only the float32 member of the VkClearColorValue union is
    // initialized; assumes a non-integer color format — confirm for integer formats.
    VkClearColorValue black = {{0, 0, 0, 1.0f}};
    mImage.clearColor(black, 0, levelCount, commandBuffer);
    return vk::NoError();
}
860
// Queues the image and both image views for deferred deletion and notifies
// observers (e.g. framebuffers) that attachments derived from this texture
// are now stale.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    // NOTE(review): the image is released against the renderer's *current*
    // queue serial while the views use this texture's *stored* serial —
    // confirm this asymmetry is intentional.
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseObject(getStoredQueueSerial(), &mBaseLevelImageView);
    renderer->releaseObject(getStoredQueueSerial(), &mMipmapImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
868
Luc Ferron66410532018-04-20 12:47:45 -0400869uint32_t TextureVk::getLevelCount() const
870{
871 ASSERT(mState.getEffectiveBaseLevel() == 0);
872
873 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
874 return mState.getMipmapMaxLevel() + 1;
875}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400876} // namespace rx