blob: 7186631c76d3c1f351de14019bc06742753ed62a [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040013#include "libANGLE/Context.h"
14#include "libANGLE/renderer/vulkan/ContextVk.h"
15#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050016#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017
18namespace rx
19{
Luc Ferron5164b792018-03-06 09:10:12 -050020namespace
21{
Jamie Madill93edca12018-03-30 10:43:18 -040022void MapSwizzleState(GLenum internalFormat,
23 const gl::SwizzleState &swizzleState,
24 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050025{
26 switch (internalFormat)
27 {
Jamie Madill26084d02018-04-09 13:44:04 -040028 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040029 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
30 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
31 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050033 break;
Jamie Madill26084d02018-04-09 13:44:04 -040034 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040035 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
36 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
37 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050039 break;
Jamie Madill26084d02018-04-09 13:44:04 -040040 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040041 swizzleStateOut->swizzleRed = GL_ZERO;
42 swizzleStateOut->swizzleGreen = GL_ZERO;
43 swizzleStateOut->swizzleBlue = GL_ZERO;
44 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040045 break;
Luc Ferron7348fc52018-05-09 07:17:16 -040046 case GL_RGB8:
47 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
48 swizzleStateOut->swizzleGreen = swizzleState.swizzleGreen;
49 swizzleStateOut->swizzleBlue = swizzleState.swizzleBlue;
50 swizzleStateOut->swizzleAlpha = GL_ONE;
51 break;
Luc Ferron5164b792018-03-06 09:10:12 -050052 default:
Jamie Madill93edca12018-03-30 10:43:18 -040053 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050054 break;
55 }
56}
Jamie Madill26084d02018-04-09 13:44:04 -040057
58constexpr VkBufferUsageFlags kStagingBufferFlags =
59 (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
60constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron5164b792018-03-06 09:10:12 -050061} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040062
// PixelBuffer implementation.
// The staging buffer accumulates pixel data for deferred buffer-to-image copies.
PixelBuffer::PixelBuffer() : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4);
}
70
Jamie Madilla7be1f72018-04-13 15:16:26 -040071PixelBuffer::~PixelBuffer()
Jamie Madill26084d02018-04-09 13:44:04 -040072{
73}
74
// Releases the underlying staging buffer resources through the renderer.
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
79
// Stages a CPU -> image pixel upload: unpacks the client data into the staging
// buffer and queues a VkBufferImageCopy that is recorded later by
// flushUpdatesToImage().
gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                              const gl::ImageIndex &index,
                                              const gl::Extents &extents,
                                              const gl::Offset &offset,
                                              const gl::InternalFormat &formatInfo,
                                              const gl::PixelUnpackState &unpack,
                                              GLenum type,
                                              const uint8_t *pixels)
{
    // Compute the layout of the client-provided (source) data from the unpack
    // state: row pitch, depth pitch, and bytes to skip before the first texel.
    GLuint inputRowPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
        inputRowPitch);

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
        inputDepthPitch);

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeSkipBytes(inputRowPitch, inputDepthPitch, unpack, applySkipImages),
        inputSkipBytes);

    RendererVk *renderer = contextVk->getRenderer();

    // The destination layout in the staging buffer is tightly packed in the
    // actual (possibly emulated) texture storage format.
    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    size_t outputRowPitch   = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;
    size_t allocationSize   = outputDepthPitch * extents.depth;
    mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                            &stagingOffset, &newBufferAllocated);

    const uint8_t *source = pixels + inputSkipBytes;

    // The load function converts from the client format/type to the storage
    // format while copying into the staging memory.
    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    // Describe the deferred buffer -> image copy for this subresource.
    VkBufferImageCopy copy;

    copy.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength                 = extents.width;
    copy.bufferImageHeight               = extents.height;
    copy.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel       = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount     = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}
148
// Stages an image -> image copy through the staging buffer: records an
// immediate copy of the (clipped) source region into the staging buffer on
// 'commandBuffer', then queues the matching buffer -> destination-image copy
// for the later flush.
// NOTE(review): 'type' is unused in this function — confirm whether a format
// conversion (load function) should be applied here.
gl::Error PixelBuffer::stageSubresourceUpdateFromImage(ContextVk *contextVk,
                                                       vk::CommandBuffer *commandBuffer,
                                                       const gl::ImageIndex &index,
                                                       const gl::Rectangle &sourceArea,
                                                       const gl::Offset dstOffset,
                                                       const gl::Extents dstExtent,
                                                       const gl::InternalFormat &formatInfo,
                                                       GLenum type,
                                                       vk::ImageHelper &srcImageHelper)
{
    // If the extents and offset is outside the source image, we need to clip.
    gl::Rectangle clippedRectangle;
    if (!ClipRectangle(sourceArea,
                       gl::Rectangle(0, 0, srcImageHelper.getExtents().width,
                                     srcImageHelper.getExtents().height),
                       &clippedRectangle))
    {
        // Empty source area, nothing to do.
        return gl::NoError();
    }

    // 1- obtain a buffer handle to copy to
    RendererVk *renderer = contextVk->getRenderer();

    const vk::Format &vkFormat         = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    size_t outputRowPitch   = storageFormat.pixelBytes * clippedRectangle.width;
    size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset  = 0;
    // Only a single 2D slice is copied, hence depth 1.
    size_t allocationSize = outputDepthPitch * 1;
    mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                            &stagingOffset, &newBufferAllocated);

    // 2- copy the source image region to the pixel buffer.
    VkBufferImageCopy copyToBuffer;
    copyToBuffer.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copyToBuffer.bufferRowLength                 = clippedRectangle.width;
    copyToBuffer.bufferImageHeight               = clippedRectangle.height;
    copyToBuffer.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copyToBuffer.imageSubresource.mipLevel       = 0;
    copyToBuffer.imageSubresource.baseArrayLayer = 0;
    copyToBuffer.imageSubresource.layerCount     = 1;
    copyToBuffer.imageOffset.x                   = clippedRectangle.x;
    copyToBuffer.imageOffset.y                   = clippedRectangle.y;
    copyToBuffer.imageOffset.z                   = 0;
    copyToBuffer.imageExtent.width               = clippedRectangle.width;
    copyToBuffer.imageExtent.height              = clippedRectangle.height;
    copyToBuffer.imageExtent.depth               = 1;

    // The source image must be in TRANSFER_SRC layout for the readback.
    srcImageHelper.changeLayoutWithStages(
        VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, commandBuffer);

    commandBuffer->copyImageToBuffer(srcImageHelper.getImage(), srcImageHelper.getCurrentLayout(),
                                     bufferHandle, 1, &copyToBuffer);

    // Describe the deferred copy into the destination subresource. It reads the
    // same staging region that the readback above wrote.
    VkBufferImageCopy copyToImage;
    copyToImage.bufferOffset                    = static_cast<VkDeviceSize>(stagingOffset);
    copyToImage.bufferRowLength                 = clippedRectangle.width;
    copyToImage.bufferImageHeight               = clippedRectangle.height;
    copyToImage.imageSubresource.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
    copyToImage.imageSubresource.mipLevel       = index.getLevelIndex();
    copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copyToImage.imageSubresource.layerCount     = index.getLayerCount();
    gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
    gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);

    // 3- enqueue the destination image subresource update
    mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
    return gl::NoError();
}
226
// Records one vkCmdCopyBufferToImage per staged update into 'commandBuffer',
// then clears the pending-update queue and recycles retained staging buffers.
vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
                                           vk::ImageHelper *image,
                                           vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    // Make the CPU writes in the staging buffer visible to the device.
    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);

        // Conservatively flush all writes to the image. We could use a more restricted barrier.
        // Do not move this above the for loop, otherwise multiple updates can have race conditions
        // and not be applied correctly as seen in:
        // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
        image->changeLayoutWithStages(
            VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    mSubresourceUpdates.clear();
    mStagingBuffer.releaseRetainedBuffers(renderer);

    return vk::NoError();
}
259
// Returns true when no subresource updates are staged.
bool PixelBuffer::empty() const
{
    return mSubresourceUpdates.empty();
}
264
Jamie Madilla7be1f72018-04-13 15:16:26 -0400265PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
Jamie Madill20fa8d52018-04-15 10:09:32 -0400266{
267}
268
// Records a pending copy out of 'bufferHandleIn' described by 'copyRegionIn'.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}
274
// Memberwise copy is correct: the update only holds a non-owning handle and a POD region.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400276
Jamie Madill26084d02018-04-09 13:44:04 -0400277// TextureVk implementation.
TextureVk::TextureVk(const gl::TextureState &state) : TextureImpl(state)
{
    // Point the render target at this texture's image and base-level view so
    // the texture can serve as a framebuffer attachment.
    mRenderTarget.image     = &mImage;
    mRenderTarget.imageView = &mBaseLevelImageView;
    mRenderTarget.resource  = this;
}
284
285TextureVk::~TextureVk()
286{
287}
288
Jamie Madill035fd6b2017-10-03 15:43:22 -0400289gl::Error TextureVk::onDestroy(const gl::Context *context)
290{
Jamie Madille1f3ad42017-10-28 23:00:42 -0400291 ContextVk *contextVk = vk::GetImpl(context);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400292 RendererVk *renderer = contextVk->getRenderer();
293
Jamie Madillc4f27e42018-03-31 14:19:18 -0400294 releaseImage(context, renderer);
Jamie Madille88ec8e2017-10-31 17:18:14 -0400295 renderer->releaseResource(*this, &mSampler);
Jamie Madill035fd6b2017-10-03 15:43:22 -0400296
Jamie Madilla7be1f72018-04-13 15:16:26 -0400297 mPixelBuffer.release(renderer);
Jamie Madill26084d02018-04-09 13:44:04 -0400298
Jamie Madill035fd6b2017-10-03 15:43:22 -0400299 return gl::NoError();
300}
301
// Defines (or redefines) one subresource of the texture and stages any initial
// pixel data. The actual Vulkan image is created lazily elsewhere.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);

        // Calculate the expected size for the index we are defining. If the size is different from
        // the given size, or the format is different, we are redefining the image so we must
        // release it.
        if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    getNewWritingNode(renderer);

    // Handle initial data. The upload is staged and applied when pending
    // updates are flushed to the image.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
348
// Stages a sub-rectangle pixel upload; the copy into the image is deferred
// until pending updates are flushed.
gl::Error TextureVk::setSubImage(const gl::Context *context,
                                 const gl::ImageIndex &index,
                                 const gl::Box &area,
                                 GLenum format,
                                 GLenum type,
                                 const gl::PixelUnpackState &unpack,
                                 const uint8_t *pixels)
{
    ContextVk *contextVk                 = vk::GetImpl(context);
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
    ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
        contextVk, index, gl::Extents(area.width, area.height, area.depth),
        gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));

    // Create a new graph node to store image initialization commands.
    getNewWritingNode(contextVk->getRenderer());

    return gl::NoError();
}
368
// Compressed texture uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
380
// Compressed sub-image uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
392
// Framebuffer-to-texture copies are not implemented in the Vulkan backend yet.
gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
402
// Framebuffer-to-texture sub-copies are not implemented in the Vulkan backend yet.
gl::Error TextureVk::copySubImage(const gl::Context *context,
                                  const gl::ImageIndex &index,
                                  const gl::Offset &destOffset,
                                  const gl::Rectangle &sourceArea,
                                  gl::Framebuffer *source)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
412
// Returns a command buffer suitable for recording writes against this texture,
// reusing the current command-graph writing node when one is still open.
vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
                                              vk::CommandBuffer **outCommandBuffer)
{
    const VkDevice device = renderer->getDevice();
    // Keep this resource alive for the current frame's submission.
    updateQueueSerial(renderer->getCurrentQueueSerial());
    if (!hasChildlessWritingNode())
    {
        // No reusable writing node: open a fresh one with its command buffer.
        beginWriteResource(renderer, outCommandBuffer);
    }
    else
    {
        vk::CommandGraphNode *node = getCurrentWritingNode();
        *outCommandBuffer          = node->getOutsideRenderPassCommands();
        if (!(*outCommandBuffer)->valid())
        {
            // The node exists but has not started recording yet; begin now.
            ANGLE_TRY(node->beginOutsideRenderPassRecording(device, renderer->getCommandPool(),
                                                            outCommandBuffer));
        }
    }
    return vk::NoError();
}
434
Jamie Madillc564c072017-06-01 12:45:42 -0400435gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500436 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400437 size_t levels,
438 GLenum internalFormat,
439 const gl::Extents &size)
440{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400441 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
442 RendererVk *renderer = contextVk->getRenderer();
443 const vk::Format &format = renderer->getFormat(internalFormat);
444 vk::CommandBuffer *commandBuffer = nullptr;
445 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
446 ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
447 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400448}
449
// EGLImage targets are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
457
// External image streams are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
466
// Mipmap generation is not implemented in the Vulkan backend yet.
gl::Error TextureVk::generateMipmap(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
472
// Non-zero base levels are not implemented in the Vulkan backend yet
// (getLevelCount() also asserts the effective base level is 0).
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
478
// Pbuffer binding is not implemented in the Vulkan backend yet.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
484
// Pbuffer release is not implemented in the Vulkan backend yet.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
490
// Returns the render target for attaching this texture to a framebuffer,
// making sure the image exists and staged updates are applied first.
gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}
510
// Lazily creates the Vulkan image (from the base level description) and
// flushes any staged pixel updates into it.
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    // Fast path: image exists and nothing is staged.
    if (mImage.valid() && mPixelBuffer.empty())
    {
        return vk::NoError();
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    if (!mImage.valid())
    {
        const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
        const gl::Extents &extents = baseLevelDesc.size;
        const uint32_t levelCount  = getLevelCount();

        ANGLE_TRY(initImage(renderer, format, extents, levelCount, commandBuffer));
    }

    // Apply staged CPU uploads / copies to the image.
    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer));
    return vk::NoError();
}
535
// Rebuilds the VkSampler from the current gl::SamplerState whenever any dirty
// bit is set (or no sampler exists yet).
gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    if (dirtyBits.none() && mSampler.valid())
    {
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        // Queue the stale sampler for deferred deletion before recreating it.
        RendererVk *renderer = contextVk->getRenderer();
        renderer->releaseResource(*this, &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext                   = nullptr;
    samplerInfo.flags                   = 0;
    samplerInfo.magFilter               = gl_vk::GetFilter(samplerState.magFilter);
    samplerInfo.minFilter               = gl_vk::GetFilter(samplerState.minFilter);
    samplerInfo.mipmapMode              = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
    samplerInfo.addressModeU            = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
    samplerInfo.addressModeV            = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
    samplerInfo.addressModeW            = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
    samplerInfo.mipLodBias              = 0.0f;
    samplerInfo.anisotropyEnable        = VK_FALSE;
    samplerInfo.maxAnisotropy           = 1.0f;
    samplerInfo.compareEnable           = VK_FALSE;
    samplerInfo.compareOp               = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod                  = samplerState.minLod;
    samplerInfo.maxLod                  = samplerState.maxLod;
    samplerInfo.borderColor             = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
    return gl::NoError();
}
576
// Multisampled storage is not implemented in the Vulkan backend yet.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
587
// Robust resource initialization is not implemented yet; returns success so
// callers can proceed (contents are cleared in initImage()).
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
594
// Returns the backing image. Only valid once the image has been initialized.
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
600
601const vk::ImageView &TextureVk::getImageView() const
602{
Jamie Madill93edca12018-03-30 10:43:18 -0400603 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400604
605 const GLenum minFilter = mState.getSamplerState().minFilter;
606 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
607 {
608 return mBaseLevelImageView;
609 }
610
611 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400612}
613
// Returns the sampler. Only valid after syncState() has created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
619
// Creates the Vulkan image, binds device-local memory, creates the two image
// views (full mip chain and base level) and clears the contents.
vk::Error TextureVk::initImage(RendererVk *renderer,
                               const vk::Format &format,
                               const gl::Extents &extents,
                               const uint32_t levelCount,
                               vk::CommandBuffer *commandBuffer)
{
    const VkDevice device = renderer->getDevice();

    // The image may be sampled, rendered to, or used as a transfer source/destination.
    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // Clear to opaque black so the image starts with defined contents.
    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    VkClearColorValue black = {{0, 0, 0, 1.0f}};
    mImage.clearColor(black, commandBuffer);
    return vk::NoError();
}
652
// Releases the image and both views through the renderer (tagged with the
// current queue serial) and notifies observers that the attachment changed.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseResource(*this, &mBaseLevelImageView);
    renderer->releaseResource(*this, &mMipmapImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
660
Luc Ferron66410532018-04-20 12:47:45 -0400661uint32_t TextureVk::getLevelCount() const
662{
663 ASSERT(mState.getEffectiveBaseLevel() == 0);
664
665 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
666 return mState.getMipmapMaxLevel() + 1;
667}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400668} // namespace rx