blob: 4a81567f02e04fba41164bb4f2d0b3e734ff2c58 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// TextureVk.cpp:
7// Implements the class methods for TextureVk.
8//
9
10#include "libANGLE/renderer/vulkan/TextureVk.h"
11
12#include "common/debug.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040013#include "libANGLE/Context.h"
14#include "libANGLE/renderer/vulkan/ContextVk.h"
Luc Ferron018709f2018-05-10 13:53:11 -040015#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill035fd6b2017-10-03 15:43:22 -040016#include "libANGLE/renderer/vulkan/RendererVk.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050017#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040018
19namespace rx
20{
Luc Ferron5164b792018-03-06 09:10:12 -050021namespace
22{
Jamie Madill93edca12018-03-30 10:43:18 -040023void MapSwizzleState(GLenum internalFormat,
24 const gl::SwizzleState &swizzleState,
25 gl::SwizzleState *swizzleStateOut)
Luc Ferron5164b792018-03-06 09:10:12 -050026{
27 switch (internalFormat)
28 {
Jamie Madill26084d02018-04-09 13:44:04 -040029 case GL_LUMINANCE8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040030 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
31 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
32 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
33 swizzleStateOut->swizzleAlpha = GL_ONE;
Luc Ferron5164b792018-03-06 09:10:12 -050034 break;
Jamie Madill26084d02018-04-09 13:44:04 -040035 case GL_LUMINANCE8_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040036 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
37 swizzleStateOut->swizzleGreen = swizzleState.swizzleRed;
38 swizzleStateOut->swizzleBlue = swizzleState.swizzleRed;
39 swizzleStateOut->swizzleAlpha = swizzleState.swizzleGreen;
Luc Ferron5164b792018-03-06 09:10:12 -050040 break;
Jamie Madill26084d02018-04-09 13:44:04 -040041 case GL_ALPHA8_OES:
Jamie Madill93edca12018-03-30 10:43:18 -040042 swizzleStateOut->swizzleRed = GL_ZERO;
43 swizzleStateOut->swizzleGreen = GL_ZERO;
44 swizzleStateOut->swizzleBlue = GL_ZERO;
45 swizzleStateOut->swizzleAlpha = swizzleState.swizzleRed;
Luc Ferron49cef9a2018-03-21 17:28:53 -040046 break;
Luc Ferron7348fc52018-05-09 07:17:16 -040047 case GL_RGB8:
48 swizzleStateOut->swizzleRed = swizzleState.swizzleRed;
49 swizzleStateOut->swizzleGreen = swizzleState.swizzleGreen;
50 swizzleStateOut->swizzleBlue = swizzleState.swizzleBlue;
51 swizzleStateOut->swizzleAlpha = GL_ONE;
52 break;
Luc Ferron5164b792018-03-06 09:10:12 -050053 default:
Jamie Madill93edca12018-03-30 10:43:18 -040054 *swizzleStateOut = swizzleState;
Luc Ferron5164b792018-03-06 09:10:12 -050055 break;
56 }
57}
Jamie Madill26084d02018-04-09 13:44:04 -040058
// Staging buffers are used both as transfer sources (buffer->image copies) and
// destinations (image readbacks).
constexpr VkBufferUsageFlags kStagingBufferFlags =
    (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
// Initial size of each staging buffer allocation: 16 KB.
constexpr size_t kStagingBufferSize = 1024 * 16;
Luc Ferron5164b792018-03-06 09:10:12 -050062} // anonymous namespace
Jamie Madill9e54b5a2016-05-25 12:57:39 -040063
Jamie Madill26084d02018-04-09 13:44:04 -040064// StagingStorage implementation.
// Constructs the pixel staging storage with transfer-capable usage flags.
PixelBuffer::PixelBuffer() : mStagingBuffer(kStagingBufferFlags, kStagingBufferSize)
{
    // vkCmdCopyBufferToImage must have an offset that is a multiple of 4.
    // https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkBufferImageCopy.html
    mStagingBuffer.init(4);
}
71
// Trivial destructor: GPU resources are returned explicitly via release().
PixelBuffer::~PixelBuffer()
{
}
75
// Hands the staging buffer's allocations back to the renderer for deferred cleanup.
void PixelBuffer::release(RendererVk *renderer)
{
    mStagingBuffer.release(renderer);
}
80
// Stages a CPU texture upload: unpacks the caller's pixel data into the staging
// buffer (converting via the format's load function) and queues a VkBufferImageCopy
// that flushUpdatesToImage will later record into a command buffer.
gl::Error PixelBuffer::stageSubresourceUpdate(ContextVk *contextVk,
                                              const gl::ImageIndex &index,
                                              const gl::Extents &extents,
                                              const gl::Offset &offset,
                                              const gl::InternalFormat &formatInfo,
                                              const gl::PixelUnpackState &unpack,
                                              GLenum type,
                                              const uint8_t *pixels)
{
    // Compute the layout of the caller-provided data from the unpack state.
    GLuint inputRowPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeRowPitch(type, extents.width, unpack.alignment, unpack.rowLength),
        inputRowPitch);

    GLuint inputDepthPitch = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeDepthPitch(extents.height, unpack.imageHeight, inputRowPitch),
        inputDepthPitch);

    // TODO(jmadill): skip images for 3D Textures.
    bool applySkipImages = false;

    GLuint inputSkipBytes = 0;
    ANGLE_TRY_RESULT(
        formatInfo.computeSkipBytes(inputRowPitch, inputDepthPitch, unpack, applySkipImages),
        inputSkipBytes);

    RendererVk *renderer = contextVk->getRenderer();

    const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
    const angle::Format &storageFormat = vkFormat.textureFormat();

    // The staged copy is tightly packed in the texture's storage format.
    size_t outputRowPitch = storageFormat.pixelBytes * extents.width;
    size_t outputDepthPitch = outputRowPitch * extents.height;

    VkBuffer bufferHandle = VK_NULL_HANDLE;

    uint8_t *stagingPointer = nullptr;
    bool newBufferAllocated = false;
    uint32_t stagingOffset = 0;
    size_t allocationSize = outputDepthPitch * extents.depth;
    mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
                            &stagingOffset, &newBufferAllocated);

    // Skip any leading rows/pixels the unpack state says to ignore.
    const uint8_t *source = pixels + inputSkipBytes;

    LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(type);

    // Convert (or straight-copy) the input into the staging memory.
    loadFunction.loadFunction(extents.width, extents.height, extents.depth, source, inputRowPitch,
                              inputDepthPitch, stagingPointer, outputRowPitch, outputDepthPitch);

    VkBufferImageCopy copy;

    copy.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
    copy.bufferRowLength = extents.width;
    copy.bufferImageHeight = extents.height;
    copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    copy.imageSubresource.mipLevel = index.getLevelIndex();
    copy.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
    copy.imageSubresource.layerCount = index.getLayerCount();

    gl_vk::GetOffset(offset, &copy.imageOffset);
    gl_vk::GetExtent(extents, &copy.imageExtent);

    // Defer the actual GPU copy until the image is known to be initialized.
    mSubresourceUpdates.emplace_back(bufferHandle, copy);

    return gl::NoError();
}
149
Luc Ferron018709f2018-05-10 13:53:11 -0400150gl::Error PixelBuffer::stageSubresourceUpdateFromRenderTarget(const gl::Context *context,
151 const gl::ImageIndex &index,
152 const gl::Rectangle &sourceArea,
153 const gl::Offset &dstOffset,
154 const gl::Extents &dstExtent,
155 const gl::InternalFormat &formatInfo,
156 vk::CommandBuffer *commandBuffer,
157 RenderTargetVk *renderTarget)
Luc Ferron2a849bf2018-05-10 13:19:11 -0400158{
159 // If the extents and offset is outside the source image, we need to clip.
160 gl::Rectangle clippedRectangle;
Luc Ferron018709f2018-05-10 13:53:11 -0400161 const gl::Extents imageExtents = renderTarget->image->getExtents();
162 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, imageExtents.width, imageExtents.height),
Luc Ferron2a849bf2018-05-10 13:19:11 -0400163 &clippedRectangle))
164 {
165 // Empty source area, nothing to do.
166 return gl::NoError();
167 }
168
169 // 1- obtain a buffer handle to copy to
Luc Ferron018709f2018-05-10 13:53:11 -0400170 RendererVk *renderer = GetImplAs<ContextVk>(context)->getRenderer();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400171
172 const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);
173 const angle::Format &storageFormat = vkFormat.textureFormat();
Luc Ferron018709f2018-05-10 13:53:11 -0400174 LoadImageFunctionInfo loadFunction = vkFormat.loadFunctions(formatInfo.type);
Luc Ferron2a849bf2018-05-10 13:19:11 -0400175
176 size_t outputRowPitch = storageFormat.pixelBytes * clippedRectangle.width;
177 size_t outputDepthPitch = outputRowPitch * clippedRectangle.height;
178
179 VkBuffer bufferHandle = VK_NULL_HANDLE;
180
181 uint8_t *stagingPointer = nullptr;
182 bool newBufferAllocated = false;
183 uint32_t stagingOffset = 0;
Luc Ferron018709f2018-05-10 13:53:11 -0400184
185 // The destination is only one layer deep.
186 size_t allocationSize = outputDepthPitch;
Luc Ferron2a849bf2018-05-10 13:19:11 -0400187 mStagingBuffer.allocate(renderer, allocationSize, &stagingPointer, &bufferHandle,
188 &stagingOffset, &newBufferAllocated);
189
Luc Ferron018709f2018-05-10 13:53:11 -0400190 // 2- copy the source image region to the pixel buffer using a cpu readback
191 if (loadFunction.requiresConversion)
192 {
193 // TODO(lucferron): This needs additional work, we will read into a temp buffer and then
194 // use the loadFunction to read the data to our PixelBuffer.
195 // http://anglebug.com/2501
196 UNIMPLEMENTED();
197 }
198 else
199 {
200 PackPixelsParams params;
201 params.area = sourceArea;
202 params.format = formatInfo.internalFormat;
203 params.type = formatInfo.type;
204 params.outputPitch = static_cast<GLuint>(outputRowPitch);
205 params.packBuffer = nullptr;
206 params.pack = gl::PixelPackState();
Luc Ferron2a849bf2018-05-10 13:19:11 -0400207
Luc Ferron018709f2018-05-10 13:53:11 -0400208 ANGLE_TRY(ReadPixelsFromRenderTarget(context, sourceArea, params, renderTarget,
209 commandBuffer, stagingPointer));
210 }
Luc Ferron2a849bf2018-05-10 13:19:11 -0400211
Luc Ferron018709f2018-05-10 13:53:11 -0400212 // 3- enqueue the destination image subresource update
Luc Ferron2a849bf2018-05-10 13:19:11 -0400213 VkBufferImageCopy copyToImage;
214 copyToImage.bufferOffset = static_cast<VkDeviceSize>(stagingOffset);
Luc Ferron018709f2018-05-10 13:53:11 -0400215 copyToImage.bufferRowLength = 0; // Tightly packed data can be specified as 0.
Luc Ferron2a849bf2018-05-10 13:19:11 -0400216 copyToImage.bufferImageHeight = clippedRectangle.height;
217 copyToImage.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
218 copyToImage.imageSubresource.mipLevel = index.getLevelIndex();
219 copyToImage.imageSubresource.baseArrayLayer = index.hasLayer() ? index.getLayerIndex() : 0;
220 copyToImage.imageSubresource.layerCount = index.getLayerCount();
221 gl_vk::GetOffset(dstOffset, &copyToImage.imageOffset);
222 gl_vk::GetExtent(dstExtent, &copyToImage.imageExtent);
223
224 // 3- enqueue the destination image subresource update
225 mSubresourceUpdates.emplace_back(bufferHandle, copyToImage);
226 return gl::NoError();
227}
228
// Records all queued buffer->image copies into |commandBuffer| targeting |image|,
// then clears the queue and recycles retained staging buffers. No-op when empty.
vk::Error PixelBuffer::flushUpdatesToImage(RendererVk *renderer,
                                           vk::ImageHelper *image,
                                           vk::CommandBuffer *commandBuffer)
{
    if (mSubresourceUpdates.empty())
    {
        return vk::NoError();
    }

    // Make the CPU writes to the staging buffer visible to the device.
    ANGLE_TRY(mStagingBuffer.flush(renderer->getDevice()));

    for (const SubresourceUpdate &update : mSubresourceUpdates)
    {
        ASSERT(update.bufferHandle != VK_NULL_HANDLE);

        // Conservatively flush all writes to the image. We could use a more restricted barrier.
        // Do not move this above the for loop, otherwise multiple updates can have race conditions
        // and not be applied correctly as seen in:
        // dEQP-gles2.functional_texture_specification_texsubimage2d_align_2d* tests on Windows AMD
        image->changeLayoutWithStages(
            VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, commandBuffer);

        commandBuffer->copyBufferToImage(update.bufferHandle, image->getImage(),
                                         image->getCurrentLayout(), 1, &update.copyRegion);
    }

    mSubresourceUpdates.clear();
    mStagingBuffer.releaseRetainedBuffers(renderer);

    return vk::NoError();
}
261
Luc Ferron10434f62018-04-24 10:06:37 -0400262bool PixelBuffer::empty() const
263{
264 return mSubresourceUpdates.empty();
265}
266
// Default update: no source buffer; copyRegion stays unset until assigned.
PixelBuffer::SubresourceUpdate::SubresourceUpdate() : bufferHandle(VK_NULL_HANDLE)
{
}

// Records a staged copy sourced from |bufferHandleIn| described by |copyRegionIn|.
PixelBuffer::SubresourceUpdate::SubresourceUpdate(VkBuffer bufferHandleIn,
                                                  const VkBufferImageCopy &copyRegionIn)
    : bufferHandle(bufferHandleIn), copyRegion(copyRegionIn)
{
}

PixelBuffer::SubresourceUpdate::SubresourceUpdate(const SubresourceUpdate &other) = default;
Jamie Madill20fa8d52018-04-15 10:09:32 -0400278
Jamie Madill26084d02018-04-09 13:44:04 -0400279// TextureVk implementation.
// Wires the render target wrapper to this texture's image and base-level view so
// the texture can be used as a framebuffer attachment.
TextureVk::TextureVk(const gl::TextureState &state) : TextureImpl(state)
{
    mRenderTarget.image = &mImage;
    mRenderTarget.imageView = &mBaseLevelImageView;
    mRenderTarget.resource = this;
}
286
// Trivial destructor: Vulkan resources are released in onDestroy().
TextureVk::~TextureVk()
{
}
290
// Returns the image, views, sampler, and staging storage to the renderer for
// deferred (queue-serial-gated) destruction.
gl::Error TextureVk::onDestroy(const gl::Context *context)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    releaseImage(context, renderer);
    renderer->releaseResource(*this, &mSampler);

    mPixelBuffer.release(renderer);

    return gl::NoError();
}
303
// glTexImage entry point: releases the image if it is being redefined with a new
// size/format, then stages any initial pixel data for later flush.
gl::Error TextureVk::setImage(const gl::Context *context,
                              const gl::ImageIndex &index,
                              GLenum internalFormat,
                              const gl::Extents &size,
                              GLenum format,
                              GLenum type,
                              const gl::PixelUnpackState &unpack,
                              const uint8_t *pixels)
{
    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    // Convert internalFormat to sized internal format.
    const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(internalFormat, type);

    if (mImage.valid())
    {
        const vk::Format &vkFormat = renderer->getFormat(formatInfo.sizedInternalFormat);

        // Calculate the expected size for the index we are defining. If the size is different from
        // the given size, or the format is different, we are redefining the image so we must
        // release it.
        if (mImage.getFormat() != vkFormat || size != mImage.getSize(index))
        {
            releaseImage(context, renderer);
        }
    }

    // Early-out on empty textures, don't create a zero-sized storage.
    if (size.empty())
    {
        return gl::NoError();
    }

    // Create a new graph node to store image initialization commands.
    getNewWritingNode(renderer);

    // Handle initial data.
    if (pixels)
    {
        ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(contextVk, index, size, gl::Offset(),
                                                      formatInfo, unpack, type, pixels));
    }

    return gl::NoError();
}
350
Jamie Madillc564c072017-06-01 12:45:42 -0400351gl::Error TextureVk::setSubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400352 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400353 const gl::Box &area,
354 GLenum format,
355 GLenum type,
356 const gl::PixelUnpackState &unpack,
357 const uint8_t *pixels)
358{
Jamie Madill5b18f482017-11-30 17:24:22 -0500359 ContextVk *contextVk = vk::GetImpl(context);
360 const gl::InternalFormat &formatInfo = gl::GetInternalFormatInfo(format, type);
Luc Ferron33e05ba2018-04-23 15:12:34 -0400361 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdate(
362 contextVk, index, gl::Extents(area.width, area.height, area.depth),
363 gl::Offset(area.x, area.y, area.z), formatInfo, unpack, type, pixels));
Jamie Madillb2214862018-04-26 07:25:48 -0400364
365 // Create a new graph node to store image initialization commands.
366 getNewWritingNode(contextVk->getRenderer());
367
Jamie Madill5b18f482017-11-30 17:24:22 -0500368 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400369}
370
// Compressed texture uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedImage(const gl::Context *context,
                                        const gl::ImageIndex &index,
                                        GLenum internalFormat,
                                        const gl::Extents &size,
                                        const gl::PixelUnpackState &unpack,
                                        size_t imageSize,
                                        const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
382
// Compressed sub-image uploads are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setCompressedSubImage(const gl::Context *context,
                                           const gl::ImageIndex &index,
                                           const gl::Box &area,
                                           GLenum format,
                                           const gl::PixelUnpackState &unpack,
                                           size_t imageSize,
                                           const uint8_t *pixels)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
394
// Full glCopyTexImage is not implemented yet (copySubImage below handles the
// sub-rectangle case).
gl::Error TextureVk::copyImage(const gl::Context *context,
                               const gl::ImageIndex &index,
                               const gl::Rectangle &sourceArea,
                               GLenum internalFormat,
                               gl::Framebuffer *source)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
404
Jamie Madillc564c072017-06-01 12:45:42 -0400405gl::Error TextureVk::copySubImage(const gl::Context *context,
Jamie Madillc4f27e42018-03-31 14:19:18 -0400406 const gl::ImageIndex &index,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400407 const gl::Offset &destOffset,
408 const gl::Rectangle &sourceArea,
Jamie Madill690c8eb2018-03-12 15:20:03 -0400409 gl::Framebuffer *source)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400410{
Luc Ferron018709f2018-05-10 13:53:11 -0400411 gl::Extents fbSize = source->getReadColorbuffer()->getSize();
412 gl::Rectangle clippedSourceArea;
413 if (!ClipRectangle(sourceArea, gl::Rectangle(0, 0, fbSize.width, fbSize.height),
414 &clippedSourceArea))
415 {
416 return gl::NoError();
417 }
418
419 const gl::Offset modifiedDestOffset(destOffset.x + sourceArea.x - sourceArea.x,
420 destOffset.y + sourceArea.y - sourceArea.y, 0);
421
422 ContextVk *contextVk = vk::GetImpl(context);
423
424 FramebufferVk *framebufferVk = vk::GetImpl(source);
425 RenderTargetVk *renderTarget = framebufferVk->getColorReadRenderTarget();
426 const gl::InternalFormat &currentFormat = *mState.getBaseLevelDesc().format.info;
427
428 vk::CommandBuffer *commandBuffer = nullptr;
429 ANGLE_TRY(framebufferVk->beginWriteResource(contextVk->getRenderer(), &commandBuffer));
430
431 // For now, favor conformance. We do a CPU readback that does the conversion, and then stage the
432 // change to the pixel buffer.
433 // Eventually we can improve this easily by implementing vkCmdBlitImage to do the conversion
434 // when its supported.
435 ANGLE_TRY(mPixelBuffer.stageSubresourceUpdateFromRenderTarget(
436 context, index, clippedSourceArea, modifiedDestOffset,
437 gl::Extents(clippedSourceArea.width, clippedSourceArea.height, 1), currentFormat,
438 commandBuffer, renderTarget));
439
440 vk::CommandGraphNode *writingNode = getNewWritingNode(contextVk->getRenderer());
441 framebufferVk->onReadResource(writingNode, contextVk->getRenderer()->getCurrentQueueSerial());
442 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400443}
444
// Returns a command buffer this texture can record writes into, reusing the
// current writing node's outside-render-pass command buffer when one exists and
// starting recording lazily if it was never begun.
vk::Error TextureVk::getCommandBufferForWrite(RendererVk *renderer,
                                              vk::CommandBuffer **outCommandBuffer)
{
    const VkDevice device = renderer->getDevice();
    updateQueueSerial(renderer->getCurrentQueueSerial());
    if (!hasChildlessWritingNode())
    {
        beginWriteResource(renderer, outCommandBuffer);
    }
    else
    {
        vk::CommandGraphNode *node = getCurrentWritingNode();
        *outCommandBuffer = node->getOutsideRenderPassCommands();
        if (!(*outCommandBuffer)->valid())
        {
            // The node exists but recording hasn't started; begin it now.
            ANGLE_TRY(node->beginOutsideRenderPassRecording(device, renderer->getCommandPool(),
                                                            outCommandBuffer));
        }
    }
    return vk::NoError();
}
466
Jamie Madillc564c072017-06-01 12:45:42 -0400467gl::Error TextureVk::setStorage(const gl::Context *context,
Corentin Wallez99d492c2018-02-27 15:17:10 -0500468 gl::TextureType type,
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400469 size_t levels,
470 GLenum internalFormat,
471 const gl::Extents &size)
472{
Luc Ferronfa7503c2018-05-08 11:25:06 -0400473 ContextVk *contextVk = GetAs<ContextVk>(context->getImplementation());
474 RendererVk *renderer = contextVk->getRenderer();
475 const vk::Format &format = renderer->getFormat(internalFormat);
476 vk::CommandBuffer *commandBuffer = nullptr;
477 ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));
478 ANGLE_TRY(initImage(renderer, format, size, static_cast<uint32_t>(levels), commandBuffer));
479 return gl::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400480}
481
// EGLImage targets are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setEGLImageTarget(const gl::Context *context,
                                       gl::TextureType type,
                                       egl::Image *image)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
489
// External (EGLStream-backed) images are not implemented in the Vulkan backend yet.
gl::Error TextureVk::setImageExternal(const gl::Context *context,
                                      gl::TextureType type,
                                      egl::Stream *stream,
                                      const egl::Stream::GLTextureDescription &desc)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
498
// Mipmap generation is not implemented in the Vulkan backend yet.
gl::Error TextureVk::generateMipmap(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
504
// Non-zero base levels are not implemented yet (getLevelCount also asserts base == 0).
gl::Error TextureVk::setBaseLevel(const gl::Context *context, GLuint baseLevel)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
510
// Pbuffer bindTexImage is not implemented in the Vulkan backend yet.
gl::Error TextureVk::bindTexImage(const gl::Context *context, egl::Surface *surface)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
516
// Pbuffer releaseTexImage is not implemented in the Vulkan backend yet.
gl::Error TextureVk::releaseTexImage(const gl::Context *context)
{
    UNIMPLEMENTED();
    return gl::InternalError();
}
522
// Provides the render target for framebuffer attachment, ensuring the backing
// image exists and all staged pixel updates have been flushed first.
gl::Error TextureVk::getAttachmentRenderTarget(const gl::Context *context,
                                               GLenum binding,
                                               const gl::ImageIndex &imageIndex,
                                               FramebufferAttachmentRenderTarget **rtOut)
{
    // TODO(jmadill): Handle cube textures. http://anglebug.com/2470
    ASSERT(imageIndex.getType() == gl::TextureType::_2D);

    // Non-zero mip level attachments are an ES 3.0 feature.
    ASSERT(imageIndex.getLevelIndex() == 0 && !imageIndex.hasLayer());

    ContextVk *contextVk = vk::GetImpl(context);
    RendererVk *renderer = contextVk->getRenderer();

    ANGLE_TRY(ensureImageInitialized(renderer));

    *rtOut = &mRenderTarget;
    return gl::NoError();
}
542
// Lazily creates the Vulkan image (sized from the base level description) and
// flushes any staged pixel-buffer updates into it. Fast-path returns when the
// image is valid and nothing is staged.
vk::Error TextureVk::ensureImageInitialized(RendererVk *renderer)
{
    if (mImage.valid() && mPixelBuffer.empty())
    {
        return vk::NoError();
    }

    vk::CommandBuffer *commandBuffer = nullptr;
    ANGLE_TRY(getCommandBufferForWrite(renderer, &commandBuffer));

    if (!mImage.valid())
    {
        const gl::ImageDesc &baseLevelDesc = mState.getBaseLevelDesc();
        const vk::Format &format =
            renderer->getFormat(baseLevelDesc.format.info->sizedInternalFormat);
        const gl::Extents &extents = baseLevelDesc.size;
        const uint32_t levelCount = getLevelCount();

        ANGLE_TRY(initImage(renderer, format, extents, levelCount, commandBuffer));
    }

    ANGLE_TRY(mPixelBuffer.flushUpdatesToImage(renderer, &mImage, commandBuffer));
    return vk::NoError();
}
567
// Rebuilds the VkSampler from the current gl::SamplerState whenever any dirty bit
// is set (or no sampler exists yet); the old sampler is queued for deferred release.
gl::Error TextureVk::syncState(const gl::Context *context, const gl::Texture::DirtyBits &dirtyBits)
{
    if (dirtyBits.none() && mSampler.valid())
    {
        return gl::NoError();
    }

    ContextVk *contextVk = vk::GetImpl(context);
    if (mSampler.valid())
    {
        // Defer destruction until the GPU is done with the old sampler.
        RendererVk *renderer = contextVk->getRenderer();
        renderer->releaseResource(*this, &mSampler);
    }

    const gl::SamplerState &samplerState = mState.getSamplerState();

    // Create a simple sampler. Force basic parameter settings.
    VkSamplerCreateInfo samplerInfo;
    samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext = nullptr;
    samplerInfo.flags = 0;
    samplerInfo.magFilter = gl_vk::GetFilter(samplerState.magFilter);
    samplerInfo.minFilter = gl_vk::GetFilter(samplerState.minFilter);
    samplerInfo.mipmapMode = gl_vk::GetSamplerMipmapMode(samplerState.minFilter);
    samplerInfo.addressModeU = gl_vk::GetSamplerAddressMode(samplerState.wrapS);
    samplerInfo.addressModeV = gl_vk::GetSamplerAddressMode(samplerState.wrapT);
    samplerInfo.addressModeW = gl_vk::GetSamplerAddressMode(samplerState.wrapR);
    samplerInfo.mipLodBias = 0.0f;
    samplerInfo.anisotropyEnable = VK_FALSE;
    samplerInfo.maxAnisotropy = 1.0f;
    samplerInfo.compareEnable = VK_FALSE;
    samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
    samplerInfo.minLod = samplerState.minLod;
    samplerInfo.maxLod = samplerState.maxLod;
    samplerInfo.borderColor = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;

    ANGLE_TRY(mSampler.init(contextVk->getDevice(), samplerInfo));
    return gl::NoError();
}
608
// Multisample storage is not implemented in the Vulkan backend yet.
gl::Error TextureVk::setStorageMultisample(const gl::Context *context,
                                           gl::TextureType type,
                                           GLsizei samples,
                                           GLint internalformat,
                                           const gl::Extents &size,
                                           bool fixedSampleLocations)
{
    UNIMPLEMENTED();
    return gl::InternalError() << "setStorageMultisample is unimplemented.";
}
619
// Robust resource initialization is not implemented; deliberately returns success
// so callers proceed (contents are cleared in initImage instead).
gl::Error TextureVk::initializeContents(const gl::Context *context,
                                        const gl::ImageIndex &imageIndex)
{
    UNIMPLEMENTED();
    return gl::NoError();
}
626
// Returns the backing Vulkan image; only valid once the image has been created.
const vk::ImageHelper &TextureVk::getImage() const
{
    ASSERT(mImage.valid());
    return mImage;
}
632
633const vk::ImageView &TextureVk::getImageView() const
634{
Jamie Madill93edca12018-03-30 10:43:18 -0400635 ASSERT(mImage.valid());
Luc Ferron66410532018-04-20 12:47:45 -0400636
637 const GLenum minFilter = mState.getSamplerState().minFilter;
638 if (minFilter == GL_LINEAR || minFilter == GL_NEAREST)
639 {
640 return mBaseLevelImageView;
641 }
642
643 return mMipmapImageView;
Jamie Madill5547b382017-10-23 18:16:01 -0400644}
645
// Returns the Vulkan sampler; only valid after syncState has created it.
const vk::Sampler &TextureVk::getSampler() const
{
    ASSERT(mSampler.valid());
    return mSampler;
}
651
// Creates the Vulkan image, binds device-local memory, builds the mipmapped and
// base-level image views (with emulation swizzle applied), and clears the image.
vk::Error TextureVk::initImage(RendererVk *renderer,
                               const vk::Format &format,
                               const gl::Extents &extents,
                               const uint32_t levelCount,
                               vk::CommandBuffer *commandBuffer)
{
    const VkDevice device = renderer->getDevice();

    // Usage covers sampling, attachment, and both transfer directions.
    const VkImageUsageFlags usage =
        (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
         VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT);

    ANGLE_TRY(mImage.init(device, mState.getType(), extents, format, 1, usage, levelCount));

    const VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    ANGLE_TRY(mImage.initMemory(device, renderer->getMemoryProperties(), flags));

    // Apply any channel remapping needed to emulate this format (see MapSwizzleState).
    gl::SwizzleState mappedSwizzle;
    MapSwizzleState(format.internalFormat, mState.getSwizzleState(), &mappedSwizzle);

    // TODO(jmadill): Separate imageviews for RenderTargets and Sampling.
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mMipmapImageView, levelCount));
    ANGLE_TRY(mImage.initImageView(device, mState.getType(), VK_IMAGE_ASPECT_COLOR_BIT,
                                   mappedSwizzle, &mBaseLevelImageView, 1));

    // TODO(jmadill): Fold this into the RenderPass load/store ops. http://anglebug.com/2361
    VkClearColorValue black = {{0, 0, 0, 1.0f}};
    mImage.clearColor(black, commandBuffer);
    return vk::NoError();
}
684
// Queues the image and both views for deferred destruction and notifies observers
// (e.g. attached framebuffers) that they must re-query this texture.
void TextureVk::releaseImage(const gl::Context *context, RendererVk *renderer)
{
    mImage.release(renderer->getCurrentQueueSerial(), renderer);
    renderer->releaseResource(*this, &mBaseLevelImageView);
    renderer->releaseResource(*this, &mMipmapImageView);
    onStateChange(context, angle::SubjectMessage::DEPENDENT_DIRTY_BITS);
}
692
Luc Ferron66410532018-04-20 12:47:45 -0400693uint32_t TextureVk::getLevelCount() const
694{
695 ASSERT(mState.getEffectiveBaseLevel() == 0);
696
697 // getMipmapMaxLevel will be 0 here if mipmaps are not used, so the levelCount is always +1.
698 return mState.getMipmapMaxLevel() + 1;
699}
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400700} // namespace rx