blob: 7b7bff5dd0081eca227f4da4abe81374d7320137 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkGpu.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/gpu/GrBackendSemaphore.h"
11#include "include/gpu/GrBackendSurface.h"
12#include "include/gpu/GrContextOptions.h"
13#include "include/private/SkTo.h"
14#include "src/core/SkConvertPixels.h"
15#include "src/core/SkMipMap.h"
16#include "src/gpu/GrContextPriv.h"
Robert Phillipsbd1ef682019-05-31 12:48:49 -040017#include "src/gpu/GrDataUtils.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrGeometryProcessor.h"
19#include "src/gpu/GrGpuResourceCacheAccess.h"
20#include "src/gpu/GrMesh.h"
21#include "src/gpu/GrPipeline.h"
Greg Daniel797efca2019-05-09 14:04:20 -040022#include "src/gpu/GrRenderTargetContext.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050023#include "src/gpu/GrRenderTargetPriv.h"
24#include "src/gpu/GrTexturePriv.h"
Greg Daniel797efca2019-05-09 14:04:20 -040025#include "src/gpu/SkGpuDevice.h"
Robert Phillips9dbcdcc2019-05-13 10:40:06 -040026#include "src/gpu/SkGr.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050027#include "src/gpu/vk/GrVkAMDMemoryAllocator.h"
28#include "src/gpu/vk/GrVkCommandBuffer.h"
29#include "src/gpu/vk/GrVkCommandPool.h"
30#include "src/gpu/vk/GrVkGpuCommandBuffer.h"
31#include "src/gpu/vk/GrVkImage.h"
32#include "src/gpu/vk/GrVkIndexBuffer.h"
33#include "src/gpu/vk/GrVkInterface.h"
34#include "src/gpu/vk/GrVkMemory.h"
35#include "src/gpu/vk/GrVkPipeline.h"
36#include "src/gpu/vk/GrVkPipelineState.h"
37#include "src/gpu/vk/GrVkRenderPass.h"
38#include "src/gpu/vk/GrVkResourceProvider.h"
39#include "src/gpu/vk/GrVkSemaphore.h"
40#include "src/gpu/vk/GrVkTexture.h"
41#include "src/gpu/vk/GrVkTextureRenderTarget.h"
42#include "src/gpu/vk/GrVkTransferBuffer.h"
43#include "src/gpu/vk/GrVkVertexBuffer.h"
Greg Daniel797efca2019-05-09 14:04:20 -040044#include "src/image/SkImage_Gpu.h"
45#include "src/image/SkSurface_Gpu.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050046#include "src/sksl/SkSLCompiler.h"
Greg Daniel98bffae2018-08-01 13:25:41 -040047
Mike Kleinc0bd9f92019-04-23 12:05:21 -050048#include "include/gpu/vk/GrVkExtensions.h"
49#include "include/gpu/vk/GrVkTypes.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050050
Ben Wagnerf08d1d02018-06-18 15:11:00 -040051#include <utility>
52
Forrest Reiling44f85712017-03-27 23:22:20 -070053#if !defined(SK_BUILD_FOR_WIN)
54#include <unistd.h>
55#endif // !defined(SK_BUILD_FOR_WIN)
56
Greg Danieldef55462018-08-01 13:40:14 -040057#if defined(SK_BUILD_FOR_WIN) && defined(SK_DEBUG)
Ben Wagnerab6eefe2019-05-20 11:02:49 -040058#include "src/core/SkLeanWindows.h"
Greg Danieldef55462018-08-01 13:40:14 -040059#endif
60
Greg Daniel164a9f02016-02-22 09:56:40 -050061#define VK_CALL(X) GR_VK_CALL(this->vkInterface(), X)
62#define VK_CALL_RET(RET, X) GR_VK_CALL_RET(this->vkInterface(), RET, X)
63#define VK_CALL_ERRCHECK(X) GR_VK_CALL_ERRCHECK(this->vkInterface(), X)
64
// Factory for GrVkGpu. Validates the client-supplied backend context, builds a
// GrVkInterface (function-pointer table) for the instance/device, and wraps it
// all in a GrVkGpu. Returns nullptr on any validation failure.
sk_sp<GrGpu> GrVkGpu::Make(const GrVkBackendContext& backendContext,
                           const GrContextOptions& options, GrContext* context) {
    // All four core Vulkan handles are required; without them we cannot operate.
    if (backendContext.fInstance == VK_NULL_HANDLE ||
        backendContext.fPhysicalDevice == VK_NULL_HANDLE ||
        backendContext.fDevice == VK_NULL_HANDLE ||
        backendContext.fQueue == VK_NULL_HANDLE) {
        return nullptr;
    }
    // fGetProc is how we resolve every Vulkan entry point; it is mandatory.
    if (!backendContext.fGetProc) {
        return nullptr;
    }

    // vkEnumerateInstanceVersion only exists on Vulkan 1.1+ loaders, so it is
    // resolved dynamically; its absence implies a 1.0 instance.
    PFN_vkEnumerateInstanceVersion localEnumerateInstanceVersion =
            reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
                    backendContext.fGetProc("vkEnumerateInstanceVersion",
                                            VK_NULL_HANDLE, VK_NULL_HANDLE));
    uint32_t instanceVersion = 0;
    if (!localEnumerateInstanceVersion) {
        instanceVersion = VK_MAKE_VERSION(1, 0, 0);
    } else {
        VkResult err = localEnumerateInstanceVersion(&instanceVersion);
        if (err) {
            SkDebugf("Failed to enumerate instance version. Err: %d\n", err);
            return nullptr;
        }
    }

    PFN_vkGetPhysicalDeviceProperties localGetPhysicalDeviceProperties =
            reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(
                    backendContext.fGetProc("vkGetPhysicalDeviceProperties",
                                            backendContext.fInstance,
                                            VK_NULL_HANDLE));

    if (!localGetPhysicalDeviceProperties) {
        return nullptr;
    }
    VkPhysicalDeviceProperties physDeviceProperties;
    localGetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &physDeviceProperties);
    uint32_t physDevVersion = physDeviceProperties.apiVersion;

    // The client may cap the API version we are allowed to use; clamp both the
    // instance and physical-device versions to that ceiling.
    uint32_t apiVersion = backendContext.fMaxAPIVersion ? backendContext.fMaxAPIVersion
                                                        : instanceVersion;

    instanceVersion = SkTMin(instanceVersion, apiVersion);
    physDevVersion = SkTMin(physDevVersion, apiVersion);

    sk_sp<const GrVkInterface> interface;

    if (backendContext.fVkExtensions) {
        // The client told us exactly which extensions are enabled.
        interface.reset(new GrVkInterface(backendContext.fGetProc,
                                          backendContext.fInstance,
                                          backendContext.fDevice,
                                          instanceVersion,
                                          physDevVersion,
                                          backendContext.fVkExtensions));
        if (!interface->validate(instanceVersion, physDevVersion, backendContext.fVkExtensions)) {
            return nullptr;
        }
    } else {
        // Legacy path: synthesize a GrVkExtensions from the old flag bits.
        GrVkExtensions extensions;
        // The only extension flag that may affect the vulkan backend is the swapchain extension.
        // We need to know if this is enabled to know if we can transition to a present layout
        // when flushing a surface.
        if (backendContext.fExtensions & kKHR_swapchain_GrVkExtensionFlag) {
            const char* swapChainExtName = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            extensions.init(backendContext.fGetProc, backendContext.fInstance,
                            backendContext.fPhysicalDevice, 0, nullptr, 1, &swapChainExtName);
        }
        interface.reset(new GrVkInterface(backendContext.fGetProc,
                                          backendContext.fInstance,
                                          backendContext.fDevice,
                                          instanceVersion,
                                          physDevVersion,
                                          &extensions));
        if (!interface->validate(instanceVersion, physDevVersion, &extensions)) {
            return nullptr;
        }
    }

    sk_sp<GrVkGpu> vkGpu(new GrVkGpu(context, options, backendContext, interface,
                                     instanceVersion, physDevVersion));
    // A protected context is only usable if the caps (queried in the ctor)
    // report protected-memory support.
    if (backendContext.fProtectedContext == GrProtected::kYes &&
        !vkGpu->vkCaps().supportsProtectedMemory()) {
        return nullptr;
    }
    return std::move(vkGpu);
}
152
153////////////////////////////////////////////////////////////////////////////////
154
// Constructor: adopts the client's Vulkan handles (it does NOT own the
// instance/device — asserted below), builds GrVkCaps from whichever feature
// description the client supplied, and primes the first command buffer.
GrVkGpu::GrVkGpu(GrContext* context, const GrContextOptions& options,
                 const GrVkBackendContext& backendContext, sk_sp<const GrVkInterface> interface,
                 uint32_t instanceVersion, uint32_t physicalDeviceVersion)
        : INHERITED(context)
        , fInterface(std::move(interface))
        , fMemoryAllocator(backendContext.fMemoryAllocator)
        , fInstance(backendContext.fInstance)
        , fPhysicalDevice(backendContext.fPhysicalDevice)
        , fDevice(backendContext.fDevice)
        , fQueue(backendContext.fQueue)
        , fQueueIndex(backendContext.fGraphicsQueueIndex)
        , fResourceProvider(this)
        , fDisconnected(false)
        , fProtectedContext(backendContext.fProtectedContext) {
    SkASSERT(!backendContext.fOwnsInstanceAndDevice);

    if (!fMemoryAllocator) {
        // We were not given a memory allocator at creation; fall back to the
        // bundled AMD VulkanMemoryAllocator wrapper.
        fMemoryAllocator.reset(new GrVkAMDMemoryAllocator(backendContext.fPhysicalDevice,
                                                          fDevice, fInterface));
    }

    fCompiler = new SkSL::Compiler();

    // Three ways the client may have described device features, in order of
    // preference: a full VkPhysicalDeviceFeatures2, a plain
    // VkPhysicalDeviceFeatures, or legacy feature flag bits.
    if (backendContext.fDeviceFeatures2) {
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   *backendContext.fDeviceFeatures2, instanceVersion,
                                   physicalDeviceVersion,
                                   *backendContext.fVkExtensions, fProtectedContext));
    } else if (backendContext.fDeviceFeatures) {
        // Wrap the 1.0-style features struct in a features2 for GrVkCaps.
        VkPhysicalDeviceFeatures2 features2;
        features2.pNext = nullptr;
        features2.features = *backendContext.fDeviceFeatures;
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   features2, instanceVersion, physicalDeviceVersion,
                                   *backendContext.fVkExtensions, fProtectedContext));
    } else {
        // Legacy path: translate the individual feature flag bits.
        VkPhysicalDeviceFeatures2 features;
        memset(&features, 0, sizeof(VkPhysicalDeviceFeatures2));
        features.pNext = nullptr;
        if (backendContext.fFeatures & kGeometryShader_GrVkFeatureFlag) {
            features.features.geometryShader = true;
        }
        if (backendContext.fFeatures & kDualSrcBlend_GrVkFeatureFlag) {
            features.features.dualSrcBlend = true;
        }
        if (backendContext.fFeatures & kSampleRateShading_GrVkFeatureFlag) {
            features.features.sampleRateShading = true;
        }
        GrVkExtensions extensions;
        // The only extension flag that may affect the vulkan backend is the swapchain extension.
        // We need to know if this is enabled to know if we can transition to a present layout
        // when flushing a surface.
        if (backendContext.fExtensions & kKHR_swapchain_GrVkExtensionFlag) {
            const char* swapChainExtName = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            extensions.init(backendContext.fGetProc, backendContext.fInstance,
                            backendContext.fPhysicalDevice, 0, nullptr, 1, &swapChainExtName);
        }
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   features, instanceVersion, physicalDeviceVersion, extensions,
                                   fProtectedContext));
    }
    fCaps.reset(SkRef(fVkCaps.get()));

    VK_CALL(GetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &fPhysDevProps));
    VK_CALL(GetPhysicalDeviceMemoryProperties(backendContext.fPhysicalDevice, &fPhysDevMemProps));

    fResourceProvider.init();

    // Grab a command pool/buffer and start recording immediately; the GPU is
    // always in a "recording" state between submits.
    fCmdPool = fResourceProvider.findOrCreateCommandPool();
    fCurrentCmdBuffer = fCmdPool->getPrimaryCommandBuffer();
    SkASSERT(fCurrentCmdBuffer);
    fCurrentCmdBuffer->begin(this);
}
229
// Orderly teardown of all GPU-side state: ends recording, drains the queue,
// then releases pools, semaphores, provider resources, and the allocator
// before nulling the (unowned) Vulkan handles. Order here is load-bearing.
void GrVkGpu::destroyResources() {
    if (fCmdPool) {
        fCmdPool->getPrimaryCommandBuffer()->end(this);
        fCmdPool->close();
    }

    // wait for all commands to finish
    VkResult res = VK_CALL(QueueWaitIdle(fQueue));

    // On windows, sometimes calls to QueueWaitIdle return before actually signalling the fences
    // on the command buffers even though they have completed. This causes an assert to fire when
    // destroying the command buffers. Currently this only seems to happen on windows, so we add a
    // sleep to make sure the fence signals.
#ifdef SK_DEBUG
    if (this->vkCaps().mustSleepOnTearDown()) {
#if defined(SK_BUILD_FOR_WIN)
        Sleep(10); // In milliseconds
#else
        sleep(1); // In seconds
#endif
    }
#endif

#ifdef SK_DEBUG
    // Device loss is tolerated here; anything else indicates a real bug.
    SkASSERT(VK_SUCCESS == res || VK_ERROR_DEVICE_LOST == res);
#endif

    if (fCmdPool) {
        fCmdPool->unref(this);
        fCmdPool = nullptr;
    }

    // Drop our references on any semaphores still pending a wait or signal.
    for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
        fSemaphoresToWaitOn[i]->unref(this);
    }
    fSemaphoresToWaitOn.reset();

    for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
        fSemaphoresToSignal[i]->unref(this);
    }
    fSemaphoresToSignal.reset();

    // must call this just before we destroy the command pool and VkDevice
    fResourceProvider.destroyResources(VK_ERROR_DEVICE_LOST == res);

    fMemoryAllocator.reset();

    // We do not own these handles; just forget them.
    fQueue = VK_NULL_HANDLE;
    fDevice = VK_NULL_HANDLE;
    fInstance = VK_NULL_HANDLE;
}
281
282GrVkGpu::~GrVkGpu() {
283 if (!fDisconnected) {
284 this->destroyResources();
285 }
286 delete fCompiler;
287}
288
289
// Detaches this gpu from its context. kCleanup performs a full, orderly
// teardown; any other type abandons resources without touching the (possibly
// dead) device. Idempotent via fDisconnected.
void GrVkGpu::disconnect(DisconnectType type) {
    INHERITED::disconnect(type);
    if (!fDisconnected) {
        if (DisconnectType::kCleanup == type) {
            this->destroyResources();
        } else {
            // Abandon path: never call into Vulkan, just drop CPU-side refs.
            if (fCmdPool) {
                fCmdPool->unrefAndAbandon();
                fCmdPool = nullptr;
            }
            for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
                fSemaphoresToWaitOn[i]->unrefAndAbandon();
            }
            for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
                fSemaphoresToSignal[i]->unrefAndAbandon();
            }

            // must call this just before we destroy the command pool and VkDevice
            fResourceProvider.abandonResources();

            fMemoryAllocator.reset();
        }
        fSemaphoresToWaitOn.reset();
        fSemaphoresToSignal.reset();
        fCurrentCmdBuffer = nullptr;
        fDisconnected = true;
    }
}
318
319///////////////////////////////////////////////////////////////////////////////
320
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400321GrGpuRTCommandBuffer* GrVkGpu::getCommandBuffer(
Ethan Nicholas56d19a52018-10-15 11:26:20 -0400322 GrRenderTarget* rt, GrSurfaceOrigin origin, const SkRect& bounds,
Greg Daniel500d58b2017-08-24 15:59:33 -0400323 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
324 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400325 if (!fCachedRTCommandBuffer) {
326 fCachedRTCommandBuffer.reset(new GrVkGpuRTCommandBuffer(this));
327 }
328
Greg Daniela41a74a2018-10-09 12:59:23 +0000329 fCachedRTCommandBuffer->set(rt, origin, colorInfo, stencilInfo);
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400330 return fCachedRTCommandBuffer.get();
Greg Daniel500d58b2017-08-24 15:59:33 -0400331}
332
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400333GrGpuTextureCommandBuffer* GrVkGpu::getCommandBuffer(GrTexture* texture, GrSurfaceOrigin origin) {
334 if (!fCachedTexCommandBuffer) {
335 fCachedTexCommandBuffer.reset(new GrVkGpuTextureCommandBuffer(this));
336 }
337
338 fCachedTexCommandBuffer->set(texture, origin);
339 return fCachedTexCommandBuffer.get();
egdaniel066df7c2016-06-08 14:02:27 -0700340}
341
// Submits the current primary command buffer to the queue (unless it is empty
// and nothing forces a submit), releases per-submit semaphores/drawables, and
// starts recording into a fresh command pool/buffer.
void GrVkGpu::submitCommandBuffer(SyncQueue sync, GrGpuFinishedProc finishedProc,
                                  GrGpuFinishedContext finishedContext) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(fCurrentCmdBuffer);
    SkASSERT(!fCachedRTCommandBuffer || !fCachedRTCommandBuffer->isActive());
    SkASSERT(!fCachedTexCommandBuffer || !fCachedTexCommandBuffer->isActive());

    // Fast path: nothing recorded, no forced sync, no semaphores pending —
    // skip the submit entirely but still poll finished command buffers and
    // register any finished proc.
    if (!fCurrentCmdBuffer->hasWork() && kForce_SyncQueue != sync &&
        !fSemaphoresToSignal.count() && !fSemaphoresToWaitOn.count()) {
        SkASSERT(fDrawables.empty());
        fResourceProvider.checkCommandBuffers();
        if (finishedProc) {
            fResourceProvider.addFinishedProcToActiveCommandBuffers(finishedProc, finishedContext);
        }
        return;
    }

    fCurrentCmdBuffer->end(this);
    fCmdPool->close();
    fCurrentCmdBuffer->submitToQueue(this, fQueue, sync, fSemaphoresToSignal, fSemaphoresToWaitOn);

    if (finishedProc) {
        // Make sure this is called after closing the current command pool
        fResourceProvider.addFinishedProcToActiveCommandBuffers(finishedProc, finishedContext);
    }

    // We must delete any drawables that have been waiting until submit for us to destroy.
    fDrawables.reset();

    // The submit consumed the pending wait/signal semaphores; drop our refs.
    for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
        fSemaphoresToWaitOn[i]->unref(this);
    }
    fSemaphoresToWaitOn.reset();
    for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
        fSemaphoresToSignal[i]->unref(this);
    }
    fSemaphoresToSignal.reset();

    // Release old command pool and create a new one
    fCmdPool->unref(this);
    fResourceProvider.checkCommandBuffers();
    fCmdPool = fResourceProvider.findOrCreateCommandPool();
    fCurrentCmdBuffer = fCmdPool->getPrimaryCommandBuffer();
    fCurrentCmdBuffer->begin(this);
}
387
388///////////////////////////////////////////////////////////////////////////////
Brian Salomondbf70722019-02-07 11:31:24 -0500389sk_sp<GrGpuBuffer> GrVkGpu::onCreateBuffer(size_t size, GrGpuBufferType type,
390 GrAccessPattern accessPattern, const void* data) {
391 sk_sp<GrGpuBuffer> buff;
cdalton397536c2016-03-25 12:15:03 -0700392 switch (type) {
Brian Salomonae64c192019-02-05 09:41:37 -0500393 case GrGpuBufferType::kVertex:
cdalton397536c2016-03-25 12:15:03 -0700394 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
395 kStatic_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500396 buff = GrVkVertexBuffer::Make(this, size, kDynamic_GrAccessPattern == accessPattern);
egdaniele05bbbb2016-04-19 12:13:41 -0700397 break;
Brian Salomonae64c192019-02-05 09:41:37 -0500398 case GrGpuBufferType::kIndex:
cdalton397536c2016-03-25 12:15:03 -0700399 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
400 kStatic_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500401 buff = GrVkIndexBuffer::Make(this, size, kDynamic_GrAccessPattern == accessPattern);
egdaniele05bbbb2016-04-19 12:13:41 -0700402 break;
Brian Salomonae64c192019-02-05 09:41:37 -0500403 case GrGpuBufferType::kXferCpuToGpu:
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400404 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
405 kStream_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500406 buff = GrVkTransferBuffer::Make(this, size, GrVkBuffer::kCopyRead_Type);
egdaniele05bbbb2016-04-19 12:13:41 -0700407 break;
Brian Salomonae64c192019-02-05 09:41:37 -0500408 case GrGpuBufferType::kXferGpuToCpu:
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400409 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
410 kStream_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500411 buff = GrVkTransferBuffer::Make(this, size, GrVkBuffer::kCopyWrite_Type);
egdaniele05bbbb2016-04-19 12:13:41 -0700412 break;
cdalton397536c2016-03-25 12:15:03 -0700413 default:
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400414 SK_ABORT("Unknown buffer type.");
cdalton397536c2016-03-25 12:15:03 -0700415 return nullptr;
416 }
cdalton1bf3e712016-04-19 10:00:02 -0700417 if (data && buff) {
418 buff->updateData(data, size);
419 }
420 return buff;
Greg Daniel164a9f02016-02-22 09:56:40 -0500421}
422
// Uploads pixel data into a texture, dispatching to the linear-tiling host
// write path or the optimal-tiling staged-copy path. Returns false if the
// surface has no texture, no base level was supplied, or the upload fails.
bool GrVkGpu::onWritePixels(GrSurface* surface, int left, int top, int width, int height,
                            GrColorType srcColorType, const GrMipLevel texels[],
                            int mipLevelCount) {
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(surface->asTexture());
    if (!vkTex) {
        return false;
    }

    // Make sure we have at least the base level
    if (!mipLevelCount || !texels[0].fPixels) {
        return false;
    }

    // Compressed formats take a different upload path not handled here.
    SkASSERT(!GrVkFormatIsCompressed(vkTex->imageFormat()));
    bool success = false;
    bool linearTiling = vkTex->isLinearTiled();
    if (linearTiling) {
        // Linear tiling only supports a single level written from the host.
        if (mipLevelCount > 1) {
            SkDebugf("Can't upload mipmap data to linear tiled texture");
            return false;
        }
        if (VK_IMAGE_LAYOUT_PREINITIALIZED != vkTex->currentLayout()) {
            // Need to change the layout to general in order to perform a host write
            vkTex->setImageLayout(this,
                                  VK_IMAGE_LAYOUT_GENERAL,
                                  VK_ACCESS_HOST_WRITE_BIT,
                                  VK_PIPELINE_STAGE_HOST_BIT,
                                  false);
            // Force the layout transition to actually execute before writing.
            this->submitCommandBuffer(kForce_SyncQueue);
        }
        success = this->uploadTexDataLinear(vkTex, left, top, width, height, srcColorType,
                                            texels[0].fPixels, texels[0].fRowBytes);
    } else {
        SkASSERT(mipLevelCount <= vkTex->texturePriv().maxMipMapLevel() + 1);
        success = this->uploadTexDataOptimal(vkTex, left, top, width, height, srcColorType, texels,
                                             mipLevelCount);
    }

    return success;
}
463
Brian Salomone05ba5a2019-04-08 11:59:07 -0400464bool GrVkGpu::onTransferPixelsTo(GrTexture* texture, int left, int top, int width, int height,
465 GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
466 size_t bufferOffset, size_t rowBytes) {
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400467 // Vulkan only supports 4-byte aligned offsets
468 if (SkToBool(bufferOffset & 0x2)) {
469 return false;
470 }
471 GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);
472 if (!vkTex) {
473 return false;
474 }
Robert Phillips1f098982019-05-15 10:27:36 -0400475
476 // Can't transfer compressed data
477 SkASSERT(!GrVkFormatIsCompressed(vkTex->imageFormat()));
478
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400479 GrVkTransferBuffer* vkBuffer = static_cast<GrVkTransferBuffer*>(transferBuffer);
480 if (!vkBuffer) {
481 return false;
482 }
483
Greg Daniel660cc992017-06-26 14:55:05 -0400484 SkDEBUGCODE(
485 SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
486 SkIRect bounds = SkIRect::MakeWH(texture->width(), texture->height());
487 SkASSERT(bounds.contains(subRect));
488 )
Brian Salomonc320b152018-02-20 14:05:36 -0500489 int bpp = GrColorTypeBytesPerPixel(bufferColorType);
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400490 if (rowBytes == 0) {
Brian Salomonc320b152018-02-20 14:05:36 -0500491 rowBytes = bpp * width;
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400492 }
493
494 // Set up copy region
495 VkBufferImageCopy region;
496 memset(&region, 0, sizeof(VkBufferImageCopy));
497 region.bufferOffset = bufferOffset;
498 region.bufferRowLength = (uint32_t)(rowBytes/bpp);
499 region.bufferImageHeight = 0;
500 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
501 region.imageOffset = { left, top, 0 };
502 region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
503
504 // Change layout of our target so it can be copied to
505 vkTex->setImageLayout(this,
506 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
507 VK_ACCESS_TRANSFER_WRITE_BIT,
508 VK_PIPELINE_STAGE_TRANSFER_BIT,
509 false);
510
511 // Copy the buffer to the image
512 fCurrentCmdBuffer->copyBufferToImage(this,
513 vkBuffer,
514 vkTex,
515 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
516 1,
517 &region);
518
Greg Daniel0fc4d2d2017-10-12 11:23:36 -0400519 vkTex->texturePriv().markMipMapsDirty();
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400520 return true;
521}
522
// Copies pixels from a surface into a transfer buffer (tightly packed rows)
// via vkCmdCopyImageToBuffer, inserting a transfer->host barrier so the CPU
// can safely read the result after the submit completes.
bool GrVkGpu::onTransferPixelsFrom(GrSurface* surface, int left, int top, int width, int height,
                                   GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
                                   size_t offset) {
    SkASSERT(surface);
    SkASSERT(transferBuffer);
    // Protected memory cannot be read back.
    if (fProtectedContext == GrProtected::kYes) {
        return false;
    }

    GrVkTransferBuffer* vkBuffer = static_cast<GrVkTransferBuffer*>(transferBuffer);

    GrVkImage* srcImage;
    if (GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(surface->asRenderTarget())) {
        // Reading from render targets that wrap a secondary command buffer is not allowed since
        // it would require us to know the VkImage, which we don't have, as well as need us to
        // stop and start the VkRenderPass which we don't have access to.
        if (rt->wrapsSecondaryCommandBuffer()) {
            return false;
        }
        // resolve the render target if necessary
        switch (rt->getResolveType()) {
            case GrVkRenderTarget::kCantResolve_ResolveType:
                return false;
            case GrVkRenderTarget::kAutoResolves_ResolveType:
                break;
            case GrVkRenderTarget::kCanResolve_ResolveType:
                this->resolveRenderTargetNoFlush(rt);
                break;
            default:
                SK_ABORT("Unknown resolve type");
        }
        srcImage = rt;
    } else {
        srcImage = static_cast<GrVkTexture*>(surface->asTexture());
    }

    // Set up copy region
    VkBufferImageCopy region;
    memset(&region, 0, sizeof(VkBufferImageCopy));
    region.bufferOffset = offset;
    // Row length equal to the copy width means tightly packed output rows.
    region.bufferRowLength = width;
    region.bufferImageHeight = 0;
    region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    region.imageOffset = { left, top, 0 };
    region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };

    // The source must be in TRANSFER_SRC layout before the copy.
    srcImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             VK_ACCESS_TRANSFER_READ_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    fCurrentCmdBuffer->copyImageToBuffer(this, srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                         vkBuffer, 1, &region);

    // Make sure the copy to buffer has finished.
    vkBuffer->addMemoryBarrier(this,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               VK_ACCESS_HOST_READ_BIT,
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_PIPELINE_STAGE_HOST_BIT,
                               false);
    return true;
}
587
// Records a vkCmdResolveImage resolving the MSAA image of 'src' into 'dst'
// (its render target image if it has one, otherwise its texture image), after
// transitioning both images into the required transfer layouts.
void GrVkGpu::resolveImage(GrSurface* dst, GrVkRenderTarget* src, const SkIRect& srcRect,
                           const SkIPoint& dstPoint) {
    SkASSERT(dst);
    SkASSERT(src && src->numSamples() > 1 && src->msaaImage());

    VkImageResolve resolveInfo;
    resolveInfo.srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    resolveInfo.srcOffset = {srcRect.fLeft, srcRect.fTop, 0};
    resolveInfo.dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    resolveInfo.dstOffset = {dstPoint.fX, dstPoint.fY, 0};
    resolveInfo.extent = {(uint32_t)srcRect.width(), (uint32_t)srcRect.height(), 1};

    // Pick the destination image: render target if available, else texture.
    GrVkImage* dstImage;
    GrRenderTarget* dstRT = dst->asRenderTarget();
    if (dstRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(dstRT);
        dstImage = vkRT;
    } else {
        SkASSERT(dst->asTexture());
        dstImage = static_cast<GrVkTexture*>(dst->asTexture());
    }
    // Destination must be writable by the transfer stage.
    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    // Source (the multisampled image) must be readable by the transfer stage.
    src->msaaImage()->setImageLayout(this,
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     VK_ACCESS_TRANSFER_READ_BIT,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT,
                                     false);

    fCurrentCmdBuffer->resolveImage(this, *src->msaaImage(), *dstImage, 1, &resolveInfo);
}
623
Brian Salomon1fabd512018-02-09 09:54:25 -0500624void GrVkGpu::internalResolveRenderTarget(GrRenderTarget* target, bool requiresSubmit) {
egdaniel66933552016-08-24 07:22:19 -0700625 if (target->needsResolve()) {
Chris Dalton6ce447a2019-06-23 18:07:38 -0600626 SkASSERT(target->numSamples() > 1);
egdaniel52ad2512016-08-04 12:50:01 -0700627 GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(target);
628 SkASSERT(rt->msaaImage());
Greg Daniel69d49922017-02-23 09:44:02 -0500629
egdaniel4bcd62e2016-08-31 07:37:31 -0700630 const SkIRect& srcRect = rt->getResolveRect();
egdaniel52ad2512016-08-04 12:50:01 -0700631
Brian Salomon1fabd512018-02-09 09:54:25 -0500632 this->resolveImage(target, rt, srcRect, SkIPoint::Make(srcRect.fLeft, srcRect.fTop));
egdaniel52ad2512016-08-04 12:50:01 -0700633
634 rt->flagAsResolved();
Greg Daniel69d49922017-02-23 09:44:02 -0500635
636 if (requiresSubmit) {
637 this->submitCommandBuffer(kSkip_SyncQueue);
638 }
egdaniel52ad2512016-08-04 12:50:01 -0700639 }
640}
641
// Writes a sub-rect of pixel data into a linear-tiled texture by memory-mapping the image's
// backing allocation on the host and copying rows directly. rowBytes == 0 means the source rows
// are tightly packed. Returns false if the image memory cannot be mapped.
bool GrVkGpu::uploadTexDataLinear(GrVkTexture* tex, int left, int top, int width, int height,
                                  GrColorType dataColorType, const void* data, size_t rowBytes) {
    SkASSERT(data);
    SkASSERT(tex->isLinearTiled());

    // If we're uploading compressed data then we should be using uploadCompressedTexData
    SkASSERT(!GrPixelConfigIsCompressed(GrColorTypeToPixelConfig(dataColorType,
                                                                 GrSRGBEncoded::kNo)));

    // Debug-only check that the destination rect lies within the texture bounds.
    SkDEBUGCODE(
        SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
        SkIRect bounds = SkIRect::MakeWH(tex->width(), tex->height());
        SkASSERT(bounds.contains(subRect));
    )
    int bpp = GrColorTypeBytesPerPixel(dataColorType);
    size_t trimRowBytes = width * bpp;
    if (!rowBytes) {
        // Zero rowBytes is the "tightly packed" convention.
        rowBytes = trimRowBytes;
    }

    // Host writes to a linear image are only legal while it is in one of these layouts.
    SkASSERT(VK_IMAGE_LAYOUT_PREINITIALIZED == tex->currentLayout() ||
             VK_IMAGE_LAYOUT_GENERAL == tex->currentLayout());
    const VkImageSubresource subres = {
        VK_IMAGE_ASPECT_COLOR_BIT,
        0,  // mipLevel
        0,  // arraySlice
    };
    VkSubresourceLayout layout;

    const GrVkInterface* interface = this->vkInterface();

    // Ask the driver for the image's actual row pitch so we can address rows in the mapping.
    GR_VK_CALL(interface, GetImageSubresourceLayout(fDevice,
                                                    tex->image(),
                                                    &subres,
                                                    &layout));

    const GrVkAlloc& alloc = tex->alloc();
    // Byte offset of the destination rect's top-left texel within the image allocation.
    VkDeviceSize offset = top * layout.rowPitch + left * bpp;
    VkDeviceSize size = height*layout.rowPitch;
    SkASSERT(size + offset <= alloc.fSize);
    void* mapPtr = GrVkMemory::MapAlloc(this, alloc);
    if (!mapPtr) {
        return false;
    }
    mapPtr = reinterpret_cast<char*>(mapPtr) + offset;

    // Copy row by row: source stride is rowBytes, destination stride is the image's rowPitch,
    // and only trimRowBytes of each row carry pixel data.
    SkRectMemcpy(mapPtr, static_cast<size_t>(layout.rowPitch), data, rowBytes, trimRowBytes,
                 height);

    // Flush in case the allocation is non-coherent, then release the mapping.
    GrVkMemory::FlushMappedAlloc(this, alloc, offset, size);
    GrVkMemory::UnmapAlloc(this, alloc);

    return true;
}
696
// Uploads one or more mip levels of pixel data to an optimally-tiled texture. The levels are
// packed into a single host-visible transfer buffer and a buffer->image copy is recorded on the
// current command buffer. For RGB_888x data destined for a VK_FORMAT_R8G8B8_UNORM image, the data
// is first uploaded to a temporary RGBA_8888 texture and then copied into the destination (only
// one level in that case). Returns true without recording any work if no level has pixels.
bool GrVkGpu::uploadTexDataOptimal(GrVkTexture* tex, int left, int top, int width, int height,
                                   GrColorType dataColorType, const GrMipLevel texels[],
                                   int mipLevelCount) {
    SkASSERT(!tex->isLinearTiled());
    // The assumption is either that we have no mipmaps, or that our rect is the entire texture
    SkASSERT(1 == mipLevelCount ||
             (0 == left && 0 == top && width == tex->width() && height == tex->height()));

    // We assume that if the texture has mip levels, we either upload to all the levels or just the
    // first.
    SkASSERT(1 == mipLevelCount || mipLevelCount == (tex->texturePriv().maxMipMapLevel() + 1));

    // If we're uploading compressed data then we should be using uploadCompressedTexData
    SkASSERT(!GrPixelConfigIsCompressed(GrColorTypeToPixelConfig(dataColorType,
                                                                 GrSRGBEncoded::kNo)));

    if (width == 0 || height == 0) {
        return false;
    }

    // The caller's color type must match the texture's config exactly.
    if (GrPixelConfigToColorType(tex->config()) != dataColorType) {
        return false;
    }

    // For RGB_888x src data we are uploading it first to an RGBA texture and then copying it to the
    // dst RGB texture. Thus we do not upload mip levels for that.
    if (dataColorType == GrColorType::kRGB_888x && tex->imageFormat() == VK_FORMAT_R8G8B8_UNORM) {
        SkASSERT(tex->config() == kRGB_888_GrPixelConfig);
        // First check that we'll be able to do the copy to the R8G8B8 image in the end via a
        // blit or draw.
        if (!this->vkCaps().formatCanBeDstofBlit(VK_FORMAT_R8G8B8_UNORM, tex->isLinearTiled()) &&
            !this->vkCaps().maxRenderTargetSampleCount(VK_FORMAT_R8G8B8_UNORM)) {
            return false;
        }
        mipLevelCount = 1;
    }

    SkASSERT(this->vkCaps().isFormatTexturable(tex->imageFormat()));
    int bpp = GrColorTypeBytesPerPixel(dataColorType);

    // texels is const.
    // But we may need to adjust the fPixels ptr based on the copyRect, or fRowBytes.
    // Because of this we need to make a non-const shallow copy of texels.
    SkAutoTMalloc<GrMipLevel> texelsShallowCopy;

    texelsShallowCopy.reset(mipLevelCount);
    memcpy(texelsShallowCopy.get(), texels, mipLevelCount*sizeof(GrMipLevel));

    // First pass: compute the per-level byte offsets within the staging buffer and its total size.
    // Level 0 always starts at offset 0; levels with no pixels get a (meaningless) 0 offset.
    SkTArray<size_t> individualMipOffsets(mipLevelCount);
    individualMipOffsets.push_back(0);
    size_t combinedBufferSize = width * bpp * height;
    int currentWidth = width;
    int currentHeight = height;
    if (!texelsShallowCopy[0].fPixels) {
        combinedBufferSize = 0;
    }

    // The alignment must be at least 4 bytes and a multiple of the bytes per pixel of the image
    // config. This works with the assumption that the bytes in pixel config is always a power of 2.
    SkASSERT((bpp & (bpp - 1)) == 0);
    const size_t alignmentMask = 0x3 | (bpp - 1);
    for (int currentMipLevel = 1; currentMipLevel < mipLevelCount; currentMipLevel++) {
        currentWidth = SkTMax(1, currentWidth/2);
        currentHeight = SkTMax(1, currentHeight/2);

        if (texelsShallowCopy[currentMipLevel].fPixels) {
            const size_t trimmedSize = currentWidth * bpp * currentHeight;
            // Round the running size up to the required alignment before placing this level.
            const size_t alignmentDiff = combinedBufferSize & alignmentMask;
            if (alignmentDiff != 0) {
                combinedBufferSize += alignmentMask - alignmentDiff + 1;
            }
            individualMipOffsets.push_back(combinedBufferSize);
            combinedBufferSize += trimmedSize;
        } else {
            individualMipOffsets.push_back(0);
        }
    }
    if (0 == combinedBufferSize) {
        // We don't actually have any data to upload so just return success
        return true;
    }

    // allocate buffer to hold our mip data
    sk_sp<GrVkTransferBuffer> transferBuffer =
            GrVkTransferBuffer::Make(this, combinedBufferSize, GrVkBuffer::kCopyRead_Type);
    if (!transferBuffer) {
        return false;
    }

    int uploadLeft = left;
    int uploadTop = top;
    GrVkTexture* uploadTexture = tex;
    // For uploading RGB_888x data to an R8G8B8_UNORM texture we must first upload the data to an
    // R8G8B8A8_UNORM image and then copy it.
    sk_sp<GrVkTexture> copyTexture;
    if (dataColorType == GrColorType::kRGB_888x && tex->imageFormat() == VK_FORMAT_R8G8B8_UNORM) {
        bool dstHasYcbcr = tex->ycbcrConversionInfo().isValid();
        if (!this->vkCaps().canCopyAsBlit(tex->config(), 1, false, dstHasYcbcr,
                                          kRGBA_8888_GrPixelConfig, 1, false, false)) {
            return false;
        }
        GrSurfaceDesc surfDesc;
        surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
        surfDesc.fWidth = width;
        surfDesc.fHeight = height;
        surfDesc.fConfig = kRGBA_8888_GrPixelConfig;
        surfDesc.fSampleCnt = 1;

        VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                       VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                       VK_IMAGE_USAGE_TRANSFER_DST_BIT;

        GrVkImage::ImageDesc imageDesc;
        imageDesc.fImageType = VK_IMAGE_TYPE_2D;
        imageDesc.fFormat = VK_FORMAT_R8G8B8A8_UNORM;
        imageDesc.fWidth = width;
        imageDesc.fHeight = height;
        imageDesc.fLevels = 1;
        imageDesc.fSamples = 1;
        imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
        imageDesc.fUsageFlags = usageFlags;
        imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

        copyTexture = GrVkTexture::MakeNewTexture(this, SkBudgeted::kYes, surfDesc, imageDesc,
                                                  GrMipMapsStatus::kNotAllocated);
        if (!copyTexture) {
            return false;
        }

        // Redirect the staged upload to the temporary texture's origin.
        uploadTexture = copyTexture.get();
        uploadLeft = 0;
        uploadTop = 0;
    }

    // Second pass: tightly pack each level's rows into the staging buffer and record one
    // VkBufferImageCopy region per level that has pixels.
    char* buffer = (char*) transferBuffer->map();
    SkTArray<VkBufferImageCopy> regions(mipLevelCount);

    currentWidth = width;
    currentHeight = height;
    int layerHeight = uploadTexture->height();
    for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
        if (texelsShallowCopy[currentMipLevel].fPixels) {
            // When uploading a full mip chain each level's height must follow from the texture's.
            SkASSERT(1 == mipLevelCount || currentHeight == layerHeight);
            const size_t trimRowBytes = currentWidth * bpp;
            const size_t rowBytes = texelsShallowCopy[currentMipLevel].fRowBytes
                    ? texelsShallowCopy[currentMipLevel].fRowBytes
                    : trimRowBytes;

            // copy data into the buffer, skipping the trailing bytes
            char* dst = buffer + individualMipOffsets[currentMipLevel];
            const char* src = (const char*)texelsShallowCopy[currentMipLevel].fPixels;
            SkRectMemcpy(dst, trimRowBytes, src, rowBytes, trimRowBytes, currentHeight);

            VkBufferImageCopy& region = regions.push_back();
            memset(&region, 0, sizeof(VkBufferImageCopy));
            region.bufferOffset = transferBuffer->offset() + individualMipOffsets[currentMipLevel];
            region.bufferRowLength = currentWidth;
            region.bufferImageHeight = currentHeight;
            region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, SkToU32(currentMipLevel), 0, 1 };
            region.imageOffset = {uploadLeft, uploadTop, 0};
            region.imageExtent = { (uint32_t)currentWidth, (uint32_t)currentHeight, 1 };
        }
        currentWidth = SkTMax(1, currentWidth/2);
        currentHeight = SkTMax(1, currentHeight/2);
        layerHeight = currentHeight;
    }

    // no need to flush non-coherent memory, unmap will do that for us
    transferBuffer->unmap();

    // Change layout of our target so it can be copied to
    uploadTexture->setImageLayout(this,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  VK_ACCESS_TRANSFER_WRITE_BIT,
                                  VK_PIPELINE_STAGE_TRANSFER_BIT,
                                  false);

    // Copy the buffer to the image
    fCurrentCmdBuffer->copyBufferToImage(this,
                                         transferBuffer.get(),
                                         uploadTexture,
                                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                         regions.count(),
                                         regions.begin());

    // If we copied the data into a temporary image first, copy that image into our main texture
    // now.
    if (copyTexture.get()) {
        SkASSERT(dataColorType == GrColorType::kRGB_888x);
        SkAssertResult(this->copySurface(tex, copyTexture.get(), SkIRect::MakeWH(width, height),
                                         SkIPoint::Make(left, top), false));
    }
    if (1 == mipLevelCount) {
        // A single-level upload invalidates any previously generated mips.
        tex->texturePriv().markMipMapsDirty();
    }

    return true;
}
895
Jim Van Verth1676cb92019-01-15 13:24:45 -0500896// It's probably possible to roll this into uploadTexDataOptimal,
897// but for now it's easier to maintain as a separate entity.
898bool GrVkGpu::uploadTexDataCompressed(GrVkTexture* tex, int left, int top, int width, int height,
Brian Salomonc0519232019-06-26 20:55:39 -0400899 SkImage::CompressionType compressionType, const void* data) {
900 SkASSERT(data);
Jim Van Verth1676cb92019-01-15 13:24:45 -0500901 SkASSERT(!tex->isLinearTiled());
902 // For now the assumption is that our rect is the entire texture.
903 // Compressed textures are read-only so this should be a reasonable assumption.
904 SkASSERT(0 == left && 0 == top && width == tex->width() && height == tex->height());
905
Jim Van Verth1676cb92019-01-15 13:24:45 -0500906 if (width == 0 || height == 0) {
907 return false;
908 }
909
Brian Salomonc0519232019-06-26 20:55:39 -0400910 SkImage::CompressionType textureCompressionType;
911 if (!GrVkFormatToCompressionType(tex->imageFormat(), &textureCompressionType) ||
912 textureCompressionType != compressionType) {
Jim Van Verth1676cb92019-01-15 13:24:45 -0500913 return false;
914 }
915
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400916 SkASSERT(this->vkCaps().isFormatTexturable(tex->imageFormat()));
Jim Van Verth1676cb92019-01-15 13:24:45 -0500917
Brian Salomonc0519232019-06-26 20:55:39 -0400918 size_t dataSize = GrCompressedDataSize(compressionType, width, height);
Jim Van Verth1676cb92019-01-15 13:24:45 -0500919
920 // allocate buffer to hold our mip data
Brian Salomon12d22642019-01-29 14:38:50 -0500921 sk_sp<GrVkTransferBuffer> transferBuffer =
Brian Salomonc0519232019-06-26 20:55:39 -0400922 GrVkTransferBuffer::Make(this, dataSize, GrVkBuffer::kCopyRead_Type);
Jim Van Verth1676cb92019-01-15 13:24:45 -0500923 if (!transferBuffer) {
924 return false;
925 }
926
927 int uploadLeft = left;
928 int uploadTop = top;
929 GrVkTexture* uploadTexture = tex;
930
931 char* buffer = (char*)transferBuffer->map();
Jim Van Verth1676cb92019-01-15 13:24:45 -0500932
Brian Salomonc0519232019-06-26 20:55:39 -0400933 memcpy(buffer, data, dataSize);
Jim Van Verth1676cb92019-01-15 13:24:45 -0500934
Brian Salomonc0519232019-06-26 20:55:39 -0400935 VkBufferImageCopy region;
936 memset(&region, 0, sizeof(VkBufferImageCopy));
937 region.bufferOffset = transferBuffer->offset();
938 region.bufferRowLength = width;
939 region.bufferImageHeight = height;
940 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
941 region.imageOffset = { uploadLeft, uploadTop, 0 };
942 region.imageExtent = { SkToU32(width), SkToU32(height), 1 };
Jim Van Verth1676cb92019-01-15 13:24:45 -0500943
944 // no need to flush non-coherent memory, unmap will do that for us
945 transferBuffer->unmap();
946
947 // Change layout of our target so it can be copied to
948 uploadTexture->setImageLayout(this,
949 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
950 VK_ACCESS_TRANSFER_WRITE_BIT,
951 VK_PIPELINE_STAGE_TRANSFER_BIT,
952 false);
953
954 // Copy the buffer to the image
955 fCurrentCmdBuffer->copyBufferToImage(this,
Brian Salomon12d22642019-01-29 14:38:50 -0500956 transferBuffer.get(),
Jim Van Verth1676cb92019-01-15 13:24:45 -0500957 uploadTexture,
958 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
Brian Salomonc0519232019-06-26 20:55:39 -0400959 1,
960 &region);
Jim Van Verth1676cb92019-01-15 13:24:45 -0500961
962 return true;
963}
964
Greg Daniel164a9f02016-02-22 09:56:40 -0500965////////////////////////////////////////////////////////////////////////////////
Robert Phillips67d52cf2017-06-05 13:38:13 -0400966sk_sp<GrTexture> GrVkGpu::onCreateTexture(const GrSurfaceDesc& desc, SkBudgeted budgeted,
Brian Salomon58389b92018-03-07 13:01:25 -0500967 const GrMipLevel texels[], int mipLevelCount) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500968 bool renderTarget = SkToBool(desc.fFlags & kRenderTarget_GrSurfaceFlag);
969
970 VkFormat pixelFormat;
Brian Salomonbdecacf2018-02-02 20:32:49 -0500971 SkAssertResult(GrPixelConfigToVkFormat(desc.fConfig, &pixelFormat));
egdaniel0a3a7f72016-06-24 09:22:31 -0700972
Greg Daniel164a9f02016-02-22 09:56:40 -0500973 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
974 if (renderTarget) {
975 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
976 }
977
978 // For now we will set the VK_IMAGE_USAGE_TRANSFER_DESTINATION_BIT and
979 // VK_IMAGE_USAGE_TRANSFER_SOURCE_BIT on every texture since we do not know whether or not we
980 // will be using this texture in some copy or not. Also this assumes, as is the current case,
jvanverth62340062016-04-26 08:01:44 -0700981 // that all render targets in vulkan are also textures. If we change this practice of setting
Greg Daniel164a9f02016-02-22 09:56:40 -0500982 // both bits, we must make sure to set the destination bit if we are uploading srcData to the
983 // texture.
984 usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
985
Greg Daniel164a9f02016-02-22 09:56:40 -0500986 // This ImageDesc refers to the texture that will be read by the client. Thus even if msaa is
jvanverth62340062016-04-26 08:01:44 -0700987 // requested, this ImageDesc describes the resolved texture. Therefore we always have samples set
Greg Daniel164a9f02016-02-22 09:56:40 -0500988 // to 1.
Robert Phillips590533f2017-07-11 14:22:35 -0400989 int mipLevels = !mipLevelCount ? 1 : mipLevelCount;
Greg Daniel164a9f02016-02-22 09:56:40 -0500990 GrVkImage::ImageDesc imageDesc;
991 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
992 imageDesc.fFormat = pixelFormat;
993 imageDesc.fWidth = desc.fWidth;
994 imageDesc.fHeight = desc.fHeight;
Brian Salomon7128fdd2017-05-22 14:00:07 -0400995 imageDesc.fLevels = mipLevels;
Greg Daniel164a9f02016-02-22 09:56:40 -0500996 imageDesc.fSamples = 1;
Brian Salomon7128fdd2017-05-22 14:00:07 -0400997 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
Greg Daniel164a9f02016-02-22 09:56:40 -0500998 imageDesc.fUsageFlags = usageFlags;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400999 imageDesc.fIsProtected = desc.fIsProtected;
Greg Daniel164a9f02016-02-22 09:56:40 -05001000
Greg Daniel0fc4d2d2017-10-12 11:23:36 -04001001 GrMipMapsStatus mipMapsStatus = GrMipMapsStatus::kNotAllocated;
1002 if (mipLevels > 1) {
1003 mipMapsStatus = GrMipMapsStatus::kValid;
1004 for (int i = 0; i < mipLevels; ++i) {
1005 if (!texels[i].fPixels) {
1006 mipMapsStatus = GrMipMapsStatus::kDirty;
1007 break;
1008 }
Greg Daniel834f1202017-10-09 15:06:20 -04001009 }
1010 }
1011
Robert Phillips67d52cf2017-06-05 13:38:13 -04001012 sk_sp<GrVkTexture> tex;
Greg Daniel164a9f02016-02-22 09:56:40 -05001013 if (renderTarget) {
Greg Daniel475eb702018-09-28 14:16:50 -04001014 tex = GrVkTextureRenderTarget::MakeNewTextureRenderTarget(this, budgeted, desc,
1015 imageDesc,
1016 mipMapsStatus);
Greg Daniel164a9f02016-02-22 09:56:40 -05001017 } else {
Greg Daniel475eb702018-09-28 14:16:50 -04001018 tex = GrVkTexture::MakeNewTexture(this, budgeted, desc, imageDesc, mipMapsStatus);
Greg Daniel164a9f02016-02-22 09:56:40 -05001019 }
1020
1021 if (!tex) {
1022 return nullptr;
1023 }
1024
Brian Salomonc320b152018-02-20 14:05:36 -05001025 auto colorType = GrPixelConfigToColorType(desc.fConfig);
Robert Phillips590533f2017-07-11 14:22:35 -04001026 if (mipLevelCount) {
Brian Salomonc0519232019-06-26 20:55:39 -04001027 if (!this->uploadTexDataOptimal(tex.get(), 0, 0, desc.fWidth, desc.fHeight, colorType,
1028 texels, mipLevelCount)) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001029 tex->unref();
1030 return nullptr;
1031 }
1032 }
1033
Brian Salomonc0519232019-06-26 20:55:39 -04001034 if (SkToBool(desc.fFlags & kPerformInitialClear_GrSurfaceFlag)) {
Brian Salomond17b4a62017-05-23 16:53:47 -04001035 VkClearColorValue zeroClearColor;
1036 memset(&zeroClearColor, 0, sizeof(zeroClearColor));
1037 VkImageSubresourceRange range;
1038 range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1039 range.baseArrayLayer = 0;
1040 range.baseMipLevel = 0;
1041 range.layerCount = 1;
1042 range.levelCount = 1;
1043 tex->setImageLayout(this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1044 VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, false);
Robert Phillips67d52cf2017-06-05 13:38:13 -04001045 this->currentCommandBuffer()->clearColorImage(this, tex.get(), &zeroClearColor, 1, &range);
Brian Salomond17b4a62017-05-23 16:53:47 -04001046 }
Ben Wagnerff134f22018-04-24 16:29:16 -04001047 return std::move(tex);
Greg Daniel164a9f02016-02-22 09:56:40 -05001048}
1049
// Creates a new optimally-tiled texture from pre-compressed data. The compression type
// determines both the VkFormat and the GrPixelConfig. Compressed textures are created with a
// single level, a single sample, and are never render targets. Returns nullptr on failure.
sk_sp<GrTexture> GrVkGpu::onCreateCompressedTexture(int width, int height,
                                                    SkImage::CompressionType compressionType,
                                                    SkBudgeted budgeted, const void* data) {
    GrBackendFormat format = this->caps()->getBackendFormatFromCompressionType(compressionType);
    if (!format.getVkFormat()) {
        // No Vulkan format maps to this compression type.
        return nullptr;
    }
    VkFormat pixelFormat = *format.getVkFormat();

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;

    // For now we will set the VK_IMAGE_USAGE_TRANSFER_DESTINATION_BIT and
    // VK_IMAGE_USAGE_TRANSFER_SOURCE_BIT on every texture since we do not know whether or not we
    // will be using this texture in some copy or not. Also this assumes, as is the current case,
    // that all render targets in vulkan are also textures. If we change this practice of setting
    // both bits, we must make sure to set the destination bit if we are uploading srcData to the
    // texture.
    usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    // Compressed textures with MIP levels or multiple samples are not supported as of now.
    GrVkImage::ImageDesc imageDesc;
    imageDesc.fImageType = VK_IMAGE_TYPE_2D;
    imageDesc.fFormat = pixelFormat;
    imageDesc.fWidth = width;
    imageDesc.fHeight = height;
    imageDesc.fLevels = 1;
    imageDesc.fSamples = 1;
    imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    imageDesc.fUsageFlags = usageFlags;
    imageDesc.fIsProtected = GrProtected::kNo;

    GrSurfaceDesc desc;
    desc.fConfig = GrCompressionTypePixelConfig(compressionType);
    desc.fWidth = width;
    desc.fHeight = height;
    auto tex = GrVkTexture::MakeNewTexture(this, budgeted, desc, imageDesc,
                                           GrMipMapsStatus::kNotAllocated);
    if (!tex) {
        return nullptr;
    }

    // Stage and record the upload of the compressed bits; tex (sk_sp) cleans up on failure.
    if (!this->uploadTexDataCompressed(tex.get(), 0, 0, desc.fWidth, desc.fHeight, compressionType,
                                       data)) {
        return nullptr;
    }

    return std::move(tex);
}
1098
Greg Daniel164a9f02016-02-22 09:56:40 -05001099////////////////////////////////////////////////////////////////////////////////
1100
Greg Daniel6888c0d2017-08-25 11:55:50 -04001101void GrVkGpu::copyBuffer(GrVkBuffer* srcBuffer, GrVkBuffer* dstBuffer, VkDeviceSize srcOffset,
1102 VkDeviceSize dstOffset, VkDeviceSize size) {
1103 VkBufferCopy copyRegion;
1104 copyRegion.srcOffset = srcOffset;
1105 copyRegion.dstOffset = dstOffset;
1106 copyRegion.size = size;
1107 fCurrentCmdBuffer->copyBuffer(this, srcBuffer, dstBuffer, 1, &copyRegion);
1108}
1109
jvanverthdb379092016-07-07 11:18:46 -07001110bool GrVkGpu::updateBuffer(GrVkBuffer* buffer, const void* src,
1111 VkDeviceSize offset, VkDeviceSize size) {
jvanvertha584de92016-06-30 09:10:52 -07001112 // Update the buffer
jvanverthdb379092016-07-07 11:18:46 -07001113 fCurrentCmdBuffer->updateBuffer(this, buffer, offset, size, src);
jvanvertha584de92016-06-30 09:10:52 -07001114
1115 return true;
1116}
1117
1118////////////////////////////////////////////////////////////////////////////////
1119
Greg Daniel7e000222018-12-03 10:08:21 -05001120static bool check_image_info(const GrVkCaps& caps,
1121 const GrVkImageInfo& info,
Greg Danielcb324152019-02-25 11:36:53 -05001122 GrPixelConfig config,
1123 bool isWrappedRT) {
1124 if (VK_NULL_HANDLE == info.fImage) {
1125 return false;
1126 }
1127
1128 if (VK_NULL_HANDLE == info.fAlloc.fMemory && !isWrappedRT) {
Brian Salomond17f6582017-07-19 18:28:58 -04001129 return false;
Greg Daniel164a9f02016-02-22 09:56:40 -05001130 }
1131
Greg Daniel7e000222018-12-03 10:08:21 -05001132 if (info.fYcbcrConversionInfo.isValid()) {
1133 if (!caps.supportsYcbcrConversion() || info.fFormat != VK_NULL_HANDLE) {
1134 return false;
1135 }
jvanverthfd359ca2016-03-18 11:57:24 -07001136 }
Greg Daniel7ef28f32017-04-20 16:41:55 +00001137
Greg Danielcb324152019-02-25 11:36:53 -05001138 if (info.fImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR && !caps.supportsSwapchain()) {
1139 return false;
1140 }
1141
Greg Daniel52e16d92018-04-10 09:34:07 -04001142 SkASSERT(GrVkFormatPixelConfigPairIsValid(info.fFormat, config));
Brian Salomond17f6582017-07-19 18:28:58 -04001143 return true;
1144}
1145
Greg Danielcaa795f2019-05-14 11:54:25 -04001146static bool check_tex_image_info(const GrVkCaps& caps, const GrVkImageInfo& info) {
1147 if (info.fImageTiling == VK_IMAGE_TILING_OPTIMAL) {
Robert Phillips39ef2ef2019-05-15 08:45:53 -04001148 if (!caps.isFormatTexturable(info.fFormat)) {
Greg Danielcaa795f2019-05-14 11:54:25 -04001149 return false;
1150 }
1151 } else {
1152 SkASSERT(info.fImageTiling == VK_IMAGE_TILING_LINEAR);
Robert Phillips39ef2ef2019-05-15 08:45:53 -04001153 if (!caps.isFormatTexturableLinearly(info.fFormat)) {
Greg Danielcaa795f2019-05-14 11:54:25 -04001154 return false;
1155 }
1156 }
1157 return true;
1158}
1159
1160static bool check_rt_image_info(const GrVkCaps& caps, const GrVkImageInfo& info) {
1161 if (!caps.maxRenderTargetSampleCount(info.fFormat)) {
1162 return false;
1163 }
1164 return true;
1165}
1166
Brian Salomond17f6582017-07-19 18:28:58 -04001167sk_sp<GrTexture> GrVkGpu::onWrapBackendTexture(const GrBackendTexture& backendTex,
Brian Salomonfa2ebea2019-01-24 15:58:58 -05001168 GrWrapOwnership ownership, GrWrapCacheable cacheable,
1169 GrIOType ioType) {
Greg Daniel7e000222018-12-03 10:08:21 -05001170 GrVkImageInfo imageInfo;
1171 if (!backendTex.getVkImageInfo(&imageInfo)) {
1172 return nullptr;
1173 }
1174
Greg Danielcb324152019-02-25 11:36:53 -05001175 if (!check_image_info(this->vkCaps(), imageInfo, backendTex.config(), false)) {
Brian Salomond17f6582017-07-19 18:28:58 -04001176 return nullptr;
1177 }
Greg Danielcaa795f2019-05-14 11:54:25 -04001178 if (!check_tex_image_info(this->vkCaps(), imageInfo)) {
1179 return nullptr;
1180 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001181
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001182 if (backendTex.isProtected() && (fProtectedContext == GrProtected::kNo)) {
1183 return nullptr;
1184 }
1185
Greg Daniel164a9f02016-02-22 09:56:40 -05001186 GrSurfaceDesc surfDesc;
Brian Salomond17f6582017-07-19 18:28:58 -04001187 surfDesc.fFlags = kNone_GrSurfaceFlags;
Greg Daniel7ef28f32017-04-20 16:41:55 +00001188 surfDesc.fWidth = backendTex.width();
1189 surfDesc.fHeight = backendTex.height();
1190 surfDesc.fConfig = backendTex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001191 surfDesc.fSampleCnt = 1;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001192 surfDesc.fIsProtected = backendTex.isProtected() ? GrProtected::kYes : GrProtected::kNo;
Greg Daniel164a9f02016-02-22 09:56:40 -05001193
Greg Daniel52e16d92018-04-10 09:34:07 -04001194 sk_sp<GrVkImageLayout> layout = backendTex.getGrVkImageLayout();
1195 SkASSERT(layout);
Brian Salomonfa2ebea2019-01-24 15:58:58 -05001196 return GrVkTexture::MakeWrappedTexture(this, surfDesc, ownership, cacheable, ioType, imageInfo,
1197 std::move(layout));
Brian Salomond17f6582017-07-19 18:28:58 -04001198}
1199
1200sk_sp<GrTexture> GrVkGpu::onWrapRenderableBackendTexture(const GrBackendTexture& backendTex,
Brian Salomond17f6582017-07-19 18:28:58 -04001201 int sampleCnt,
Brian Salomonaa6ca0a2019-01-24 16:03:07 -05001202 GrWrapOwnership ownership,
1203 GrWrapCacheable cacheable) {
Greg Daniel7e000222018-12-03 10:08:21 -05001204 GrVkImageInfo imageInfo;
1205 if (!backendTex.getVkImageInfo(&imageInfo)) {
1206 return nullptr;
1207 }
1208
Greg Danielcb324152019-02-25 11:36:53 -05001209 if (!check_image_info(this->vkCaps(), imageInfo, backendTex.config(), false)) {
Brian Salomond17f6582017-07-19 18:28:58 -04001210 return nullptr;
Greg Daniel164a9f02016-02-22 09:56:40 -05001211 }
Greg Danielcaa795f2019-05-14 11:54:25 -04001212 if (!check_tex_image_info(this->vkCaps(), imageInfo)) {
1213 return nullptr;
1214 }
1215 if (!check_rt_image_info(this->vkCaps(), imageInfo)) {
1216 return nullptr;
1217 }
Brian Salomond17f6582017-07-19 18:28:58 -04001218
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001219 if (backendTex.isProtected() && (fProtectedContext == GrProtected::kNo)) {
1220 return nullptr;
1221 }
1222
Brian Salomond17f6582017-07-19 18:28:58 -04001223 GrSurfaceDesc surfDesc;
1224 surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
1225 surfDesc.fWidth = backendTex.width();
1226 surfDesc.fHeight = backendTex.height();
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001227 surfDesc.fIsProtected = backendTex.isProtected() ? GrProtected::kYes : GrProtected::kNo;
Brian Salomond17f6582017-07-19 18:28:58 -04001228 surfDesc.fConfig = backendTex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001229 surfDesc.fSampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, backendTex.config());
Brian Salomond17f6582017-07-19 18:28:58 -04001230
Greg Daniel52e16d92018-04-10 09:34:07 -04001231 sk_sp<GrVkImageLayout> layout = backendTex.getGrVkImageLayout();
1232 SkASSERT(layout);
1233
Brian Salomonaa6ca0a2019-01-24 16:03:07 -05001234 return GrVkTextureRenderTarget::MakeWrappedTextureRenderTarget(
1235 this, surfDesc, ownership, cacheable, imageInfo, std::move(layout));
Greg Daniel164a9f02016-02-22 09:56:40 -05001236}
1237
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001238sk_sp<GrRenderTarget> GrVkGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& backendRT){
Greg Daniele79b4732017-04-20 14:07:46 -04001239 // Currently the Vulkan backend does not support wrapping of msaa render targets directly. In
1240 // general this is not an issue since swapchain images in vulkan are never multisampled. Thus if
1241 // you want a multisampled RT it is best to wrap the swapchain images and then let Skia handle
1242 // creating and owning the MSAA images.
Brian Salomonbdecacf2018-02-02 20:32:49 -05001243 if (backendRT.sampleCnt() > 1) {
Greg Daniele79b4732017-04-20 14:07:46 -04001244 return nullptr;
1245 }
halcanary9d524f22016-03-29 09:03:52 -07001246
Greg Daniel323fbcf2018-04-10 13:46:30 -04001247 GrVkImageInfo info;
1248 if (!backendRT.getVkImageInfo(&info)) {
Greg Danielbcf612b2017-05-01 13:50:58 +00001249 return nullptr;
1250 }
Greg Daniel323fbcf2018-04-10 13:46:30 -04001251
Greg Danielcb324152019-02-25 11:36:53 -05001252 if (!check_image_info(this->vkCaps(), info, backendRT.config(), true)) {
jvanverthfd359ca2016-03-18 11:57:24 -07001253 return nullptr;
1254 }
Greg Danielcaa795f2019-05-14 11:54:25 -04001255 if (!check_rt_image_info(this->vkCaps(), info)) {
1256 return nullptr;
1257 }
1258
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001259 if (backendRT.isProtected() && (fProtectedContext == GrProtected::kNo)) {
1260 return nullptr;
1261 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001262
Greg Daniel164a9f02016-02-22 09:56:40 -05001263 GrSurfaceDesc desc;
Brian Salomon0ec981b2017-05-15 13:48:50 -04001264 desc.fFlags = kRenderTarget_GrSurfaceFlag;
Robert Phillips16d8ec62017-07-27 16:16:25 -04001265 desc.fWidth = backendRT.width();
1266 desc.fHeight = backendRT.height();
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001267 desc.fIsProtected = backendRT.isProtected() ? GrProtected::kYes : GrProtected::kNo;
Robert Phillips16d8ec62017-07-27 16:16:25 -04001268 desc.fConfig = backendRT.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001269 desc.fSampleCnt = 1;
Greg Daniel164a9f02016-02-22 09:56:40 -05001270
Greg Daniel323fbcf2018-04-10 13:46:30 -04001271 sk_sp<GrVkImageLayout> layout = backendRT.getGrVkImageLayout();
Greg Daniel52e16d92018-04-10 09:34:07 -04001272
Greg Daniel323fbcf2018-04-10 13:46:30 -04001273 sk_sp<GrVkRenderTarget> tgt = GrVkRenderTarget::MakeWrappedRenderTarget(this, desc, info,
Greg Daniel52e16d92018-04-10 09:34:07 -04001274 std::move(layout));
Brian Salomonafdc6b12018-03-09 12:02:32 -05001275
1276 // We don't allow the client to supply a premade stencil buffer. We always create one if needed.
1277 SkASSERT(!backendRT.stencilBits());
1278 if (tgt) {
1279 SkASSERT(tgt->canAttemptStencilAttachment());
Greg Daniel164a9f02016-02-22 09:56:40 -05001280 }
Brian Salomonafdc6b12018-03-09 12:02:32 -05001281
Ben Wagnerff134f22018-04-24 16:29:16 -04001282 return std::move(tgt);
Greg Daniel164a9f02016-02-22 09:56:40 -05001283}
1284
Greg Daniel7ef28f32017-04-20 16:41:55 +00001285sk_sp<GrRenderTarget> GrVkGpu::onWrapBackendTextureAsRenderTarget(const GrBackendTexture& tex,
Greg Daniel7ef28f32017-04-20 16:41:55 +00001286 int sampleCnt) {
Brian Osman33910292017-04-18 14:38:53 -04001287
Greg Daniel52e16d92018-04-10 09:34:07 -04001288 GrVkImageInfo imageInfo;
1289 if (!tex.getVkImageInfo(&imageInfo)) {
Greg Danielbcf612b2017-05-01 13:50:58 +00001290 return nullptr;
1291 }
Greg Danielcb324152019-02-25 11:36:53 -05001292 if (!check_image_info(this->vkCaps(), imageInfo, tex.config(), false)) {
Brian Osman33910292017-04-18 14:38:53 -04001293 return nullptr;
1294 }
Greg Danielcaa795f2019-05-14 11:54:25 -04001295 if (!check_rt_image_info(this->vkCaps(), imageInfo)) {
1296 return nullptr;
1297 }
Greg Danielcb324152019-02-25 11:36:53 -05001298
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001299 if (tex.isProtected() && (fProtectedContext == GrProtected::kNo)) {
1300 return nullptr;
1301 }
1302
Brian Osman33910292017-04-18 14:38:53 -04001303 GrSurfaceDesc desc;
Greg Daniel7ef28f32017-04-20 16:41:55 +00001304 desc.fFlags = kRenderTarget_GrSurfaceFlag;
Greg Daniel7ef28f32017-04-20 16:41:55 +00001305 desc.fWidth = tex.width();
1306 desc.fHeight = tex.height();
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001307 desc.fIsProtected = tex.isProtected() ? GrProtected::kYes : GrProtected::kNo;
Robert Phillips16d8ec62017-07-27 16:16:25 -04001308 desc.fConfig = tex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001309 desc.fSampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, tex.config());
1310 if (!desc.fSampleCnt) {
1311 return nullptr;
1312 }
Brian Osman33910292017-04-18 14:38:53 -04001313
Greg Daniel52e16d92018-04-10 09:34:07 -04001314 sk_sp<GrVkImageLayout> layout = tex.getGrVkImageLayout();
1315 SkASSERT(layout);
1316
Ben Wagnerff134f22018-04-24 16:29:16 -04001317 return GrVkRenderTarget::MakeWrappedRenderTarget(this, desc, imageInfo, std::move(layout));
Brian Osman33910292017-04-18 14:38:53 -04001318}
1319
Greg Danielb46add82019-01-02 14:51:29 -05001320sk_sp<GrRenderTarget> GrVkGpu::onWrapVulkanSecondaryCBAsRenderTarget(
1321 const SkImageInfo& imageInfo, const GrVkDrawableInfo& vkInfo) {
1322 int maxSize = this->caps()->maxTextureSize();
1323 if (imageInfo.width() > maxSize || imageInfo.height() > maxSize) {
1324 return nullptr;
1325 }
1326
1327 GrBackendFormat backendFormat = GrBackendFormat::MakeVk(vkInfo.fFormat);
1328 if (!backendFormat.isValid()) {
1329 return nullptr;
1330 }
Robert Phillipsd8f79a22019-06-24 13:25:42 -04001331 int sampleCnt = this->caps()->getRenderTargetSampleCount(1, imageInfo.colorType(),
1332 backendFormat);
1333 if (!sampleCnt) {
1334 return nullptr;
1335 }
1336
Greg Danielb46add82019-01-02 14:51:29 -05001337 GrPixelConfig config = this->caps()->getConfigFromBackendFormat(backendFormat,
1338 imageInfo.colorType());
1339 if (config == kUnknown_GrPixelConfig) {
1340 return nullptr;
1341 }
1342
1343 GrSurfaceDesc desc;
1344 desc.fFlags = kRenderTarget_GrSurfaceFlag;
1345 desc.fWidth = imageInfo.width();
1346 desc.fHeight = imageInfo.height();
1347 desc.fConfig = config;
Robert Phillipsd8f79a22019-06-24 13:25:42 -04001348 desc.fSampleCnt = sampleCnt;
Greg Danielb46add82019-01-02 14:51:29 -05001349
1350 return GrVkRenderTarget::MakeSecondaryCBRenderTarget(this, desc, vkInfo);
1351}
1352
// Regenerates all mip levels of |tex| on the GPU by repeatedly blitting each level
// down into the next (level N-1 -> level N) with linear filtering. Returns false if
// the texture is linearly tiled or the format cannot be blitted to/from.
bool GrVkGpu::onRegenerateMipMapLevels(GrTexture* tex) {
    auto* vkTex = static_cast<GrVkTexture*>(tex);
    // don't do anything for linearly tiled textures (can't have mipmaps)
    if (vkTex->isLinearTiled()) {
        SkDebugf("Trying to create mipmap for linear tiled texture");
        return false;
    }

    // determine if we can blit to and from this format
    const GrVkCaps& caps = this->vkCaps();
    if (!caps.formatCanBeDstofBlit(vkTex->imageFormat(), false) ||
        !caps.formatCanBeSrcofBlit(vkTex->imageFormat(), false) ||
        !caps.mipMapSupport()) {
        return false;
    }

    int width = tex->width();
    int height = tex->height();
    VkImageBlit blitRegion;
    memset(&blitRegion, 0, sizeof(VkImageBlit));

    // SkMipMap doesn't include the base level in the level count so we have to add 1
    uint32_t levelCount = SkMipMap::ComputeLevelCount(tex->width(), tex->height()) + 1;
    SkASSERT(levelCount == vkTex->mipLevels());

    // change layout of the layers so we can write to them.
    vkTex->setImageLayout(this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_ACCESS_TRANSFER_WRITE_BIT,
                          VK_PIPELINE_STAGE_TRANSFER_BIT, false);

    // setup memory barrier
    // This barrier template is reused inside the loop: before each blit it transitions
    // the just-written previous level from TRANSFER_DST to TRANSFER_SRC so it can be
    // read as the blit source.
    SkASSERT(GrVkFormatIsSupported(vkTex->imageFormat()));
    VkImageMemoryBarrier imageMemoryBarrier = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,  // sType
            nullptr,                                 // pNext
            VK_ACCESS_TRANSFER_WRITE_BIT,            // srcAccessMask
            VK_ACCESS_TRANSFER_READ_BIT,             // dstAccessMask
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,    // oldLayout
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,    // newLayout
            VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
            VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
            vkTex->image(),                          // image
            {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}  // subresourceRange
    };

    // Blit the miplevels
    uint32_t mipLevel = 1;
    while (mipLevel < levelCount) {
        int prevWidth = width;
        int prevHeight = height;
        // Each level is half the previous one, clamped to at least 1 pixel.
        width = SkTMax(1, width / 2);
        height = SkTMax(1, height / 2);

        // Transition level (mipLevel - 1) DST -> SRC before reading from it.
        imageMemoryBarrier.subresourceRange.baseMipLevel = mipLevel - 1;
        this->addImageMemoryBarrier(vkTex->resource(), VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    VK_PIPELINE_STAGE_TRANSFER_BIT, false, &imageMemoryBarrier);

        blitRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, mipLevel - 1, 0, 1 };
        blitRegion.srcOffsets[0] = { 0, 0, 0 };
        blitRegion.srcOffsets[1] = { prevWidth, prevHeight, 1 };
        blitRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, mipLevel, 0, 1 };
        blitRegion.dstOffsets[0] = { 0, 0, 0 };
        blitRegion.dstOffsets[1] = { width, height, 1 };
        fCurrentCmdBuffer->blitImage(this,
                                     vkTex->resource(),
                                     vkTex->image(),
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     vkTex->resource(),
                                     vkTex->image(),
                                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                     1,
                                     &blitRegion,
                                     VK_FILTER_LINEAR);
        ++mipLevel;
    }
    if (levelCount > 1) {
        // This barrier logically is not needed, but it changes the final level to the same layout
        // as all the others, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL. This makes tracking of the
        // layouts and future layout changes easier. The alternative here would be to track layout
        // and memory accesses per layer which doesn't seem worth it.
        imageMemoryBarrier.subresourceRange.baseMipLevel = mipLevel - 1;
        this->addImageMemoryBarrier(vkTex->resource(), VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    VK_PIPELINE_STAGE_TRANSFER_BIT, false, &imageMemoryBarrier);
        vkTex->updateImageLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
    }
    return true;
}
1439
Greg Daniel164a9f02016-02-22 09:56:40 -05001440////////////////////////////////////////////////////////////////////////////////
1441
1442GrStencilAttachment* GrVkGpu::createStencilAttachmentForRenderTarget(const GrRenderTarget* rt,
1443 int width,
1444 int height) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001445 SkASSERT(width >= rt->width());
1446 SkASSERT(height >= rt->height());
1447
Chris Dalton6ce447a2019-06-23 18:07:38 -06001448 int samples = rt->numSamples();
Greg Daniel164a9f02016-02-22 09:56:40 -05001449
Ethan Nicholasf610bae2018-09-20 16:55:21 -04001450 const GrVkCaps::StencilFormat& sFmt = this->vkCaps().preferredStencilFormat();
Greg Daniel164a9f02016-02-22 09:56:40 -05001451
1452 GrVkStencilAttachment* stencil(GrVkStencilAttachment::Create(this,
Greg Daniel164a9f02016-02-22 09:56:40 -05001453 width,
1454 height,
1455 samples,
1456 sFmt));
1457 fStats.incStencilAttachmentCreates();
1458 return stencil;
1459}
1460
1461////////////////////////////////////////////////////////////////////////////////
1462
Robert Phillips28a5a432019-06-07 12:46:21 -04001463bool copy_src_data(GrVkGpu* gpu, const GrVkAlloc& alloc, VkFormat vkFormat,
1464 int width, int height,
1465 const void* srcData, size_t srcRowBytes) {
1466 SkASSERT(srcData);
Brian Salomonc0519232019-06-26 20:55:39 -04001467 SkASSERT(!GrVkFormatIsCompressed(vkFormat));
1468
1469 void* mapPtr = GrVkMemory::MapAlloc(gpu, alloc);
1470 if (!mapPtr) {
1471 return false;
1472 }
1473 size_t bytesPerPixel = GrVkBytesPerFormat(vkFormat);
1474 const size_t trimRowBytes = width * bytesPerPixel;
1475 if (!srcRowBytes) {
1476 srcRowBytes = trimRowBytes;
1477 }
1478 SkASSERT(trimRowBytes * height <= alloc.fSize);
1479
1480 SkRectMemcpy(mapPtr, trimRowBytes, srcData, srcRowBytes, trimRowBytes, height);
1481
1482 GrVkMemory::FlushMappedAlloc(gpu, alloc, 0, alloc.fSize);
1483 GrVkMemory::UnmapAlloc(gpu, alloc);
1484 return true;
1485}
1486
1487bool copy_compressed_src_data(GrVkGpu* gpu, const GrVkAlloc& alloc,
1488 SkImage::CompressionType compressionType, int width, int height,
1489 const void* data) {
1490 SkASSERT(data);
Robert Phillipsbd1ef682019-05-31 12:48:49 -04001491
Greg Daniel81df0412018-05-31 13:13:33 -04001492 void* mapPtr = GrVkMemory::MapAlloc(gpu, alloc);
1493 if (!mapPtr) {
egdaniel3602d4f2016-08-12 11:58:53 -07001494 return false;
1495 }
Robert Phillips28a5a432019-06-07 12:46:21 -04001496 mapPtr = reinterpret_cast<char*>(mapPtr);
egdaniel3602d4f2016-08-12 11:58:53 -07001497
Brian Salomonc0519232019-06-26 20:55:39 -04001498 size_t dataSize = GrCompressedDataSize(compressionType, width, height);
1499 SkASSERT(dataSize <= alloc.fSize);
1500 memcpy(mapPtr, data, dataSize);
Robert Phillips28a5a432019-06-07 12:46:21 -04001501 GrVkMemory::FlushMappedAlloc(gpu, alloc, 0, alloc.fSize);
Greg Daniel81df0412018-05-31 13:13:33 -04001502 GrVkMemory::UnmapAlloc(gpu, alloc);
egdaniel3602d4f2016-08-12 11:58:53 -07001503 return true;
1504}
Robert Phillips28a5a432019-06-07 12:46:21 -04001505bool fill_in_with_color(GrVkGpu* gpu, const GrVkAlloc& alloc, VkFormat vkFormat,
1506 int baseWidth, int baseHeight,
1507 const SkTArray<size_t>& individualMipOffsets,
1508 GrPixelConfig config, const SkColor4f& color) {
Robert Phillips42dda082019-05-14 13:29:45 -04001509
Robert Phillips28a5a432019-06-07 12:46:21 -04001510 void* mapPtr = GrVkMemory::MapAlloc(gpu, alloc);
1511 if (!mapPtr) {
1512 return false;
Robert Phillips42dda082019-05-14 13:29:45 -04001513 }
1514
Brian Salomonc0519232019-06-26 20:55:39 -04001515 SkImage::CompressionType compressionType;
1516 if (GrVkFormatToCompressionType(vkFormat, &compressionType)) {
1517 GrFillInCompressedData(compressionType, baseWidth, baseHeight, (char*)mapPtr, color);
1518 } else {
1519 // TODO: pass in alloc.fSize and assert we never write past it
1520 GrFillInData(config, baseWidth, baseHeight, individualMipOffsets, (char*)mapPtr, color);
1521 }
Robert Phillips42dda082019-05-14 13:29:45 -04001522
Robert Phillips28a5a432019-06-07 12:46:21 -04001523 GrVkMemory::FlushMappedAlloc(gpu, alloc, 0, alloc.fSize);
1524 GrVkMemory::UnmapAlloc(gpu, alloc);
1525 return true;
Robert Phillips42dda082019-05-14 13:29:45 -04001526}
1527
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001528static void set_image_layout(const GrVkInterface* vkInterface, VkCommandBuffer cmdBuffer,
1529 GrVkImageInfo* info, VkImageLayout newLayout, uint32_t mipLevels,
1530 VkAccessFlagBits dstAccessMask, VkPipelineStageFlagBits dstStageMask) {
1531 VkAccessFlags srcAccessMask = GrVkImage::LayoutToSrcAccessMask(info->fImageLayout);
1532 VkPipelineStageFlags srcStageMask = GrVkImage::LayoutToPipelineSrcStageFlags(
1533 info->fImageLayout);
1534
1535 VkImageMemoryBarrier barrier;
1536 memset(&barrier, 0, sizeof(VkImageMemoryBarrier));
1537 barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
1538 barrier.pNext = nullptr;
1539 barrier.srcAccessMask = srcAccessMask;
1540 barrier.dstAccessMask = dstAccessMask;
1541 barrier.oldLayout = info->fImageLayout;
1542 barrier.newLayout = newLayout;
1543 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1544 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1545 barrier.image = info->fImage;
1546 barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, mipLevels, 0, 1};
1547 GR_VK_CALL(vkInterface, CmdPipelineBarrier(
1548 cmdBuffer,
1549 srcStageMask,
1550 dstStageMask,
1551 0,
1552 0, nullptr,
1553 0, nullptr,
1554 1, &barrier));
1555 info->fImageLayout = newLayout;
1556}
1557
Brian Salomon52e943a2018-03-13 09:32:39 -04001558bool GrVkGpu::createTestingOnlyVkImage(GrPixelConfig config, int w, int h, bool texturable,
1559 bool renderable, GrMipMapped mipMapped, const void* srcData,
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001560 size_t srcRowBytes, const SkColor4f* color,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001561 GrVkImageInfo* info, GrProtected isProtected) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001562 SkASSERT(texturable || renderable);
1563 if (!texturable) {
1564 SkASSERT(GrMipMapped::kNo == mipMapped);
1565 SkASSERT(!srcData);
1566 }
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001567
1568 if (fProtectedContext != isProtected) {
1569 SkDebugf("Can only create protected image in protected context\n");
1570 return false;
1571 }
1572
Robert Phillips42dda082019-05-14 13:29:45 -04001573 VkFormat vkFormat;
1574 if (!GrPixelConfigToVkFormat(config, &vkFormat)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001575 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001576 }
1577
Robert Phillips0c6daf02019-05-16 12:43:11 -04001578 if (texturable && !fVkCaps->isFormatTexturable(vkFormat)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001579 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001580 }
1581
Robert Phillips0c6daf02019-05-16 12:43:11 -04001582 if (renderable && !fVkCaps->isFormatRenderable(vkFormat)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001583 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001584 }
1585
1586 // Currently we don't support uploading pixel data when mipped.
1587 if (srcData && GrMipMapped::kYes == mipMapped) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001588 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001589 }
1590
Brian Salomon52e943a2018-03-13 09:32:39 -04001591 VkImageUsageFlags usageFlags = 0;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001592 usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1593 usageFlags |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
Brian Salomon52e943a2018-03-13 09:32:39 -04001594 if (texturable) {
1595 usageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
1596 }
1597 if (renderable) {
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001598 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1599 }
1600
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001601 // Figure out the number of mip levels.
1602 uint32_t mipLevels = 1;
1603 if (GrMipMapped::kYes == mipMapped) {
1604 mipLevels = SkMipMap::ComputeLevelCount(w, h) + 1;
1605 }
1606
Robert Phillipsf62e5752019-05-30 10:36:13 -04001607 GrVkImage::ImageDesc imageDesc;
1608 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
1609 imageDesc.fFormat = vkFormat;
1610 imageDesc.fWidth = w;
1611 imageDesc.fHeight = h;
1612 imageDesc.fLevels = mipLevels;
1613 imageDesc.fSamples = 1;
1614 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
1615 imageDesc.fUsageFlags = usageFlags;
1616 imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001617 imageDesc.fIsProtected = fProtectedContext;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001618
Robert Phillipsf62e5752019-05-30 10:36:13 -04001619 if (!GrVkImage::InitImageInfo(this, imageDesc, info)) {
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001620 SkDebugf("Failed to init image info\n");
Brian Salomon52e943a2018-03-13 09:32:39 -04001621 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001622 }
1623
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001624 if (!srcData && !color) {
1625 return true;
1626 }
1627
1628 // We need to declare these early so that we can delete them at the end outside of
1629 // the if block.
Greg Daniel8385a8a2018-02-26 13:29:37 -05001630 GrVkAlloc bufferAlloc;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001631 VkBuffer buffer = VK_NULL_HANDLE;
1632
1633 VkResult err;
1634 const VkCommandBufferAllocateInfo cmdInfo = {
1635 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
1636 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -05001637 fCmdPool->vkCommandPool(), // commandPool
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001638 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
1639 1 // bufferCount
1640 };
1641
1642 VkCommandBuffer cmdBuffer;
1643 err = VK_CALL(AllocateCommandBuffers(fDevice, &cmdInfo, &cmdBuffer));
1644 if (err) {
Robert Phillipsf62e5752019-05-30 10:36:13 -04001645 GrVkImage::DestroyImageInfo(this, info);
Brian Salomon52e943a2018-03-13 09:32:39 -04001646 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001647 }
1648
1649 VkCommandBufferBeginInfo cmdBufferBeginInfo;
1650 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
1651 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1652 cmdBufferBeginInfo.pNext = nullptr;
1653 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
1654 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
1655
1656 err = VK_CALL(BeginCommandBuffer(cmdBuffer, &cmdBufferBeginInfo));
1657 SkASSERT(!err);
1658
Robert Phillips28a5a432019-06-07 12:46:21 -04001659 size_t bytesPerPixel = GrVkBytesPerFormat(vkFormat);
Brian Salomonde9f5462018-03-07 14:23:58 -05001660 SkASSERT(w && h);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001661
Brian Salomonde9f5462018-03-07 14:23:58 -05001662 SkTArray<size_t> individualMipOffsets(mipLevels);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001663
Brian Salomonc0519232019-06-26 20:55:39 -04001664 SkImage::CompressionType compressionType;
1665 bool isCompressed = GrVkFormatToCompressionType(vkFormat, &compressionType);
Robert Phillips28a5a432019-06-07 12:46:21 -04001666
Brian Salomonc0519232019-06-26 20:55:39 -04001667 size_t combinedBufferSize;
1668 if (isCompressed) {
1669 // Compressed textures currently must be non-MIP mapped and have initial data.
1670 if (mipMapped == GrMipMapped::kYes) {
1671 return false;
1672 }
1673 if (!srcData && !color) {
1674 return false;
1675 }
1676 combinedBufferSize = GrCompressedDataSize(compressionType, w, h);
1677 individualMipOffsets.push_back(0);
1678 } else {
1679 combinedBufferSize = GrComputeTightCombinedBufferSize(bytesPerPixel, w, h,
1680 &individualMipOffsets, mipLevels);
1681 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001682
Brian Salomonde9f5462018-03-07 14:23:58 -05001683 VkBufferCreateInfo bufInfo;
1684 memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
1685 bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001686 bufInfo.flags = fProtectedContext == GrProtected::kYes ? VK_BUFFER_CREATE_PROTECTED_BIT : 0;
Brian Salomonde9f5462018-03-07 14:23:58 -05001687 bufInfo.size = combinedBufferSize;
1688 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1689 bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1690 bufInfo.queueFamilyIndexCount = 0;
1691 bufInfo.pQueueFamilyIndices = nullptr;
1692 err = VK_CALL(CreateBuffer(fDevice, &bufInfo, nullptr, &buffer));
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001693
Brian Salomonde9f5462018-03-07 14:23:58 -05001694 if (err) {
Robert Phillipsf62e5752019-05-30 10:36:13 -04001695 GrVkImage::DestroyImageInfo(this, info);
Brian Salomonde9f5462018-03-07 14:23:58 -05001696 VK_CALL(EndCommandBuffer(cmdBuffer));
Ethan Nicholas8e265a72018-12-12 16:22:40 -05001697 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
Brian Salomon52e943a2018-03-13 09:32:39 -04001698 return false;
Brian Salomonde9f5462018-03-07 14:23:58 -05001699 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001700
Brian Salomonde9f5462018-03-07 14:23:58 -05001701 if (!GrVkMemory::AllocAndBindBufferMemory(this, buffer, GrVkBuffer::kCopyRead_Type, true,
1702 &bufferAlloc)) {
Robert Phillipsf62e5752019-05-30 10:36:13 -04001703 GrVkImage::DestroyImageInfo(this, info);
Brian Salomonde9f5462018-03-07 14:23:58 -05001704 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1705 VK_CALL(EndCommandBuffer(cmdBuffer));
Ethan Nicholas8e265a72018-12-12 16:22:40 -05001706 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
Brian Salomon52e943a2018-03-13 09:32:39 -04001707 return false;
Brian Salomonde9f5462018-03-07 14:23:58 -05001708 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001709
Robert Phillips28a5a432019-06-07 12:46:21 -04001710 bool result;
1711 if (!srcData) {
1712 result = fill_in_with_color(this, bufferAlloc, vkFormat, w, h, individualMipOffsets,
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001713 config, *color);
Brian Salomonc0519232019-06-26 20:55:39 -04001714 } else if (isCompressed) {
1715 result = copy_compressed_src_data(this, bufferAlloc, compressionType, w, h, srcData);
Robert Phillips28a5a432019-06-07 12:46:21 -04001716 } else {
1717 SkASSERT(1 == mipLevels);
Robert Phillips28a5a432019-06-07 12:46:21 -04001718 result = copy_src_data(this, bufferAlloc, vkFormat, w, h, srcData, srcRowBytes);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001719 }
Brian Salomonde9f5462018-03-07 14:23:58 -05001720
Robert Phillips28a5a432019-06-07 12:46:21 -04001721 if (!result) {
1722 GrVkImage::DestroyImageInfo(this, info);
1723 GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
1724 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1725 VK_CALL(EndCommandBuffer(cmdBuffer));
1726 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
1727 return false;
1728 }
1729
Brian Salomonde9f5462018-03-07 14:23:58 -05001730 // Set image layout and add barrier
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001731 set_image_layout(this->vkInterface(), cmdBuffer, info,
1732 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, mipLevels,
Greg Daniel662e2af2019-06-12 15:12:44 -04001733 VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
Brian Salomonde9f5462018-03-07 14:23:58 -05001734
1735 SkTArray<VkBufferImageCopy> regions(mipLevels);
1736
Robert Phillips28a5a432019-06-07 12:46:21 -04001737 int currentWidth = w;
1738 int currentHeight = h;
Brian Salomonde9f5462018-03-07 14:23:58 -05001739 for (uint32_t currentMipLevel = 0; currentMipLevel < mipLevels; currentMipLevel++) {
1740 // Submit copy command
1741 VkBufferImageCopy& region = regions.push_back();
1742 memset(&region, 0, sizeof(VkBufferImageCopy));
1743 region.bufferOffset = individualMipOffsets[currentMipLevel];
1744 region.bufferRowLength = currentWidth;
1745 region.bufferImageHeight = currentHeight;
Robert Phillips27eb5252019-06-03 12:59:40 -04001746 region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, currentMipLevel, 0, 1};
Brian Salomonde9f5462018-03-07 14:23:58 -05001747 region.imageOffset = {0, 0, 0};
1748 region.imageExtent = {(uint32_t)currentWidth, (uint32_t)currentHeight, 1};
1749 currentWidth = SkTMax(1, currentWidth / 2);
1750 currentHeight = SkTMax(1, currentHeight / 2);
1751 }
1752
Robert Phillipsf62e5752019-05-30 10:36:13 -04001753 VK_CALL(CmdCopyBufferToImage(cmdBuffer, buffer, info->fImage, info->fImageLayout,
1754 regions.count(), regions.begin()));
Brian Salomonde9f5462018-03-07 14:23:58 -05001755
Brian Salomon52e943a2018-03-13 09:32:39 -04001756 if (texturable) {
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001757 // Change Image layout to shader read since if we use this texture as a borrowed
1758 // texture within Ganesh we require that its layout be set to that
1759 set_image_layout(this->vkInterface(), cmdBuffer, info,
1760 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, mipLevels,
1761 VK_ACCESS_SHADER_READ_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
Brian Salomon52e943a2018-03-13 09:32:39 -04001762 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001763
1764 // End CommandBuffer
1765 err = VK_CALL(EndCommandBuffer(cmdBuffer));
1766 SkASSERT(!err);
1767
1768 // Create Fence for queue
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001769 VkFenceCreateInfo fenceInfo;
1770 memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
1771 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001772 fenceInfo.pNext = nullptr;
1773 fenceInfo.flags = 0;
1774 VkFence fence = VK_NULL_HANDLE;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001775
1776 err = VK_CALL(CreateFence(fDevice, &fenceInfo, nullptr, &fence));
1777 SkASSERT(!err);
1778
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001779 VkProtectedSubmitInfo protectedSubmitInfo;
1780 if (fProtectedContext == GrProtected::kYes) {
1781 memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
1782 protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
1783 protectedSubmitInfo.pNext = nullptr;
1784 protectedSubmitInfo.protectedSubmit = VK_TRUE;
1785 }
1786
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001787 VkSubmitInfo submitInfo;
1788 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
1789 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001790 submitInfo.pNext = fProtectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001791 submitInfo.waitSemaphoreCount = 0;
1792 submitInfo.pWaitSemaphores = nullptr;
1793 submitInfo.pWaitDstStageMask = 0;
1794 submitInfo.commandBufferCount = 1;
1795 submitInfo.pCommandBuffers = &cmdBuffer;
1796 submitInfo.signalSemaphoreCount = 0;
1797 submitInfo.pSignalSemaphores = nullptr;
1798 err = VK_CALL(QueueSubmit(this->queue(), 1, &submitInfo, fence));
1799 SkASSERT(!err);
1800
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001801 err = VK_CALL(WaitForFences(this->device(), 1, &fence, VK_TRUE, UINT64_MAX));
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001802 if (VK_TIMEOUT == err) {
Robert Phillipsf62e5752019-05-30 10:36:13 -04001803 GrVkImage::DestroyImageInfo(this, info);
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001804 if (buffer != VK_NULL_HANDLE) { // workaround for an older NVidia driver crash
1805 GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
1806 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1807 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -05001808 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001809 VK_CALL(DestroyFence(this->device(), fence, nullptr));
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001810 SkDebugf("Fence failed to signal: %d\n", err);
1811 SK_ABORT("failing");
1812 }
1813 SkASSERT(!err);
1814
1815 // Clean up transfer resources
1816 if (buffer != VK_NULL_HANDLE) { // workaround for an older NVidia driver crash
1817 GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
1818 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1819 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -05001820 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
Robert Phillipsd1d869d2019-06-07 14:21:31 -04001821 VK_CALL(DestroyFence(this->device(), fence, nullptr));
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001822
Brian Salomon52e943a2018-03-13 09:32:39 -04001823 return true;
1824}
1825
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001826static bool vk_format_to_pixel_config(VkFormat format, GrPixelConfig* config) {
1827 GrPixelConfig dontCare;
1828 if (!config) {
1829 config = &dontCare;
1830 }
1831
1832 switch (format) {
1833 case VK_FORMAT_UNDEFINED:
1834 *config = kUnknown_GrPixelConfig;
1835 return false;
1836 case VK_FORMAT_R8G8B8A8_UNORM:
1837 *config = kRGBA_8888_GrPixelConfig;
1838 return true;
1839 case VK_FORMAT_R8G8B8_UNORM:
1840 *config = kRGB_888_GrPixelConfig;
1841 return true;
1842 case VK_FORMAT_R8G8_UNORM:
1843 *config = kRG_88_GrPixelConfig;
1844 return true;
1845 case VK_FORMAT_B8G8R8A8_UNORM:
1846 *config = kBGRA_8888_GrPixelConfig;
1847 return true;
1848 case VK_FORMAT_R8G8B8A8_SRGB:
1849 *config = kSRGBA_8888_GrPixelConfig;
1850 return true;
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001851 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
1852 *config = kRGBA_1010102_GrPixelConfig;
1853 return true;
1854 case VK_FORMAT_R5G6B5_UNORM_PACK16:
1855 *config = kRGB_565_GrPixelConfig;
1856 return true;
1857 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
1858 *config = kRGBA_4444_GrPixelConfig; // we're swizzling in this case
1859 return true;
1860 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
1861 *config = kRGBA_4444_GrPixelConfig;
1862 return true;
1863 case VK_FORMAT_R8_UNORM:
1864 *config = kAlpha_8_GrPixelConfig;
1865 return true;
1866 case VK_FORMAT_R32G32B32A32_SFLOAT:
1867 *config = kRGBA_float_GrPixelConfig;
1868 return true;
1869 case VK_FORMAT_R32G32_SFLOAT:
1870 *config = kRG_float_GrPixelConfig;
1871 return true;
1872 case VK_FORMAT_R16G16B16A16_SFLOAT:
1873 *config = kRGBA_half_GrPixelConfig;
1874 return true;
1875 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
1876 *config = kRGB_ETC1_GrPixelConfig;
1877 return true;
1878 case VK_FORMAT_R16_SFLOAT:
1879 *config = kAlpha_half_GrPixelConfig;
1880 return true;
Robert Phillipsfe18de52019-06-06 17:21:50 -04001881 case VK_FORMAT_R16_UNORM:
1882 *config = kR_16_GrPixelConfig;
1883 return true;
1884 case VK_FORMAT_R16G16_UNORM:
1885 *config = kRG_1616_GrPixelConfig;
1886 return true;
Robert Phillips66a46032019-06-18 08:00:42 -04001887 // Experimental (for Y416 and mutant P016/P010)
1888 case VK_FORMAT_R16G16B16A16_UNORM:
1889 *config = kRGBA_16161616_GrPixelConfig;
1890 return true;
1891 case VK_FORMAT_R16G16_SFLOAT:
1892 *config = kRG_half_GrPixelConfig;
1893 return true;
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001894 default:
1895 return false;
1896 }
1897 SK_ABORT("Unexpected config");
1898 return false;
1899}
1900
Robert Phillipsf0313ee2019-05-21 13:51:11 -04001901GrBackendTexture GrVkGpu::createBackendTexture(int w, int h,
1902 const GrBackendFormat& format,
1903 GrMipMapped mipMapped,
1904 GrRenderable renderable,
Robert Phillips459b2952019-05-23 09:38:27 -04001905 const void* srcData, size_t rowBytes,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001906 const SkColor4f* color, GrProtected isProtected) {
Robert Phillipsd8f79a22019-06-24 13:25:42 -04001907 const GrVkCaps& caps = this->vkCaps();
Brian Salomon8a375832018-03-14 10:21:40 -04001908 this->handleDirtyContext();
Robert Phillipsa479f962018-04-10 11:45:40 -04001909
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001910 if (fProtectedContext != isProtected) {
1911 SkDebugf("Can only create protected image in protected context\n");
1912 return GrBackendTexture();
1913 }
1914
Robert Phillipsd8f79a22019-06-24 13:25:42 -04001915 if (w > caps.maxTextureSize() || h > caps.maxTextureSize()) {
Robert Phillipsa479f962018-04-10 11:45:40 -04001916 return GrBackendTexture();
1917 }
1918
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001919 const VkFormat* vkFormat = format.getVkFormat();
1920 if (!vkFormat) {
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001921 SkDebugf("Could net get vkformat\n");
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001922 return GrBackendTexture();
1923 }
1924
Robert Phillipsd8f79a22019-06-24 13:25:42 -04001925 if (!caps.isFormatTexturable(*vkFormat)) {
1926 SkDebugf("Config is not texturable\n");
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001927 return GrBackendTexture();
1928 }
Robert Phillipsd8f79a22019-06-24 13:25:42 -04001929
1930 GrPixelConfig config;
1931 if (!vk_format_to_pixel_config(*vkFormat, &config)) {
1932 SkDebugf("Could net get vkformat\n");
Robert Phillips646f6372018-09-25 09:31:10 -04001933 return GrBackendTexture();
1934 }
1935
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001936 GrVkImageInfo info;
Robert Phillips9dbcdcc2019-05-13 10:40:06 -04001937 if (!this->createTestingOnlyVkImage(config, w, h, true, GrRenderable::kYes == renderable,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001938 mipMapped, srcData, rowBytes, color, &info, isProtected)) {
1939 SkDebugf("Failed to create testing only image\n");
1940 return GrBackendTexture();
Brian Salomon52e943a2018-03-13 09:32:39 -04001941 }
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001942 GrBackendTexture beTex = GrBackendTexture(w, h, isProtected, info);
Robert Phillipsf0ced622019-05-16 09:06:25 -04001943#if GR_TEST_UTILS
Greg Daniel108bb232018-07-03 16:18:29 -04001944 // Lots of tests don't go through Skia's public interface which will set the config so for
1945 // testing we make sure we set a config here.
1946 beTex.setPixelConfig(config);
Robert Phillipsf0ced622019-05-16 09:06:25 -04001947#endif
Greg Daniel108bb232018-07-03 16:18:29 -04001948 return beTex;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001949}
1950
Robert Phillipsf0313ee2019-05-21 13:51:11 -04001951void GrVkGpu::deleteBackendTexture(const GrBackendTexture& tex) {
Robert Phillipsf0ced622019-05-16 09:06:25 -04001952 SkASSERT(GrBackendApi::kVulkan == tex.fBackend);
1953
1954 GrVkImageInfo info;
1955 if (tex.getVkImageInfo(&info)) {
1956 GrVkImage::DestroyImageInfo(this, const_cast<GrVkImageInfo*>(&info));
1957 }
1958}
1959
1960#if GR_TEST_UTILS
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001961bool GrVkGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
Greg Danielbdf12ad2018-10-12 09:31:11 -04001962 SkASSERT(GrBackendApi::kVulkan == tex.fBackend);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001963
Greg Daniel52e16d92018-04-10 09:34:07 -04001964 GrVkImageInfo backend;
1965 if (!tex.getVkImageInfo(&backend)) {
1966 return false;
1967 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001968
Greg Daniel52e16d92018-04-10 09:34:07 -04001969 if (backend.fImage && backend.fAlloc.fMemory) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001970 VkMemoryRequirements req;
1971 memset(&req, 0, sizeof(req));
1972 GR_VK_CALL(this->vkInterface(), GetImageMemoryRequirements(fDevice,
Greg Daniel52e16d92018-04-10 09:34:07 -04001973 backend.fImage,
Greg Daniel164a9f02016-02-22 09:56:40 -05001974 &req));
1975 // TODO: find a better check
1976 // This will probably fail with a different driver
1977 return (req.size > 0) && (req.size <= 8192 * 8192);
1978 }
1979
1980 return false;
1981}
1982
Brian Osman2d010b62018-08-09 10:55:09 -04001983GrBackendRenderTarget GrVkGpu::createTestingOnlyBackendRenderTarget(int w, int h, GrColorType ct) {
Robert Phillipsf62e5752019-05-30 10:36:13 -04001984 this->handleDirtyContext();
1985
Greg Daniel92cbf3f2018-04-12 16:50:17 -04001986 if (w > this->caps()->maxRenderTargetSize() || h > this->caps()->maxRenderTargetSize()) {
1987 return GrBackendRenderTarget();
1988 }
1989
Brian Osman2d010b62018-08-09 10:55:09 -04001990 auto config = GrColorTypeToPixelConfig(ct, GrSRGBEncoded::kNo);
Brian Salomon52e943a2018-03-13 09:32:39 -04001991 if (kUnknown_GrPixelConfig == config) {
1992 return {};
1993 }
Robert Phillipsf62e5752019-05-30 10:36:13 -04001994
1995 GrVkImageInfo info;
Robert Phillips646f6372018-09-25 09:31:10 -04001996 if (!this->createTestingOnlyVkImage(config, w, h, false, true, GrMipMapped::kNo, nullptr, 0,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04001997 &SkColors::kTransparent, &info, GrProtected::kNo)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001998 return {};
1999 }
Greg Daniel108bb232018-07-03 16:18:29 -04002000 GrBackendRenderTarget beRT = GrBackendRenderTarget(w, h, 1, 0, info);
2001 // Lots of tests don't go through Skia's public interface which will set the config so for
2002 // testing we make sure we set a config here.
2003 beRT.setPixelConfig(config);
2004 return beRT;
Brian Salomonf865b052018-03-09 09:01:53 -05002005}
2006
Brian Salomon52e943a2018-03-13 09:32:39 -04002007void GrVkGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget& rt) {
Greg Danielbdf12ad2018-10-12 09:31:11 -04002008 SkASSERT(GrBackendApi::kVulkan == rt.fBackend);
Brian Salomonf865b052018-03-09 09:01:53 -05002009
Greg Daniel323fbcf2018-04-10 13:46:30 -04002010 GrVkImageInfo info;
2011 if (rt.getVkImageInfo(&info)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04002012 // something in the command buffer may still be using this, so force submit
2013 this->submitCommandBuffer(kForce_SyncQueue);
Greg Daniel323fbcf2018-04-10 13:46:30 -04002014 GrVkImage::DestroyImageInfo(this, const_cast<GrVkImageInfo*>(&info));
Brian Salomon52e943a2018-03-13 09:32:39 -04002015 }
2016}
Brian Salomonf865b052018-03-09 09:01:53 -05002017
// Test-only helper: submits all recorded work and blocks the CPU until the
// queue has finished executing it (kForce_SyncQueue waits on completion).
void GrVkGpu::testingOnly_flushGpuAndSync() {
    this->submitCommandBuffer(kForce_SyncQueue);
}
Brian Salomonf865b052018-03-09 09:01:53 -05002021#endif
Greg Daniel26b50a42018-03-08 09:49:58 -05002022
Greg Daniel164a9f02016-02-22 09:56:40 -05002023////////////////////////////////////////////////////////////////////////////////
2024
Greg Daniel59dc1482019-02-22 10:46:38 -05002025void GrVkGpu::addBufferMemoryBarrier(const GrVkResource* resource,
2026 VkPipelineStageFlags srcStageMask,
Greg Daniel164a9f02016-02-22 09:56:40 -05002027 VkPipelineStageFlags dstStageMask,
2028 bool byRegion,
2029 VkBufferMemoryBarrier* barrier) const {
2030 SkASSERT(fCurrentCmdBuffer);
Greg Daniel59dc1482019-02-22 10:46:38 -05002031 SkASSERT(resource);
Greg Daniel164a9f02016-02-22 09:56:40 -05002032 fCurrentCmdBuffer->pipelineBarrier(this,
Greg Daniel59dc1482019-02-22 10:46:38 -05002033 resource,
Greg Daniel164a9f02016-02-22 09:56:40 -05002034 srcStageMask,
2035 dstStageMask,
2036 byRegion,
2037 GrVkCommandBuffer::kBufferMemory_BarrierType,
2038 barrier);
2039}
2040
Greg Daniel59dc1482019-02-22 10:46:38 -05002041void GrVkGpu::addImageMemoryBarrier(const GrVkResource* resource,
2042 VkPipelineStageFlags srcStageMask,
Greg Daniel164a9f02016-02-22 09:56:40 -05002043 VkPipelineStageFlags dstStageMask,
2044 bool byRegion,
2045 VkImageMemoryBarrier* barrier) const {
2046 SkASSERT(fCurrentCmdBuffer);
Greg Daniel59dc1482019-02-22 10:46:38 -05002047 SkASSERT(resource);
Greg Daniel164a9f02016-02-22 09:56:40 -05002048 fCurrentCmdBuffer->pipelineBarrier(this,
Greg Daniel59dc1482019-02-22 10:46:38 -05002049 resource,
Greg Daniel164a9f02016-02-22 09:56:40 -05002050 srcStageMask,
2051 dstStageMask,
2052 byRegion,
2053 GrVkCommandBuffer::kImageMemory_BarrierType,
2054 barrier);
2055}
2056
// Finishes a flush: transitions any surfaces headed for presentation or
// external IO into the appropriate layouts, then submits the current command
// buffer (synchronously if kSyncCpu_GrFlushFlag is set).
void GrVkGpu::onFinishFlush(GrSurfaceProxy* proxies[], int n,
                            SkSurface::BackendSurfaceAccess access, const GrFlushInfo& info,
                            const GrPrepareForExternalIORequests& externalRequests) {
    SkASSERT(n >= 0);
    SkASSERT(!n || proxies);
    // Submit the current command buffer to the Queue. Whether we inserted semaphores or not does
    // not affect what we do here.
    if (n && access == SkSurface::BackendSurfaceAccess::kPresent) {
        // Each proxy must already be instantiated; transition its image (texture
        // or render target) to the present layout.
        GrVkImage* image;
        for (int i = 0; i < n; ++i) {
            SkASSERT(proxies[i]->isInstantiated());
            if (GrTexture* tex = proxies[i]->peekTexture()) {
                image = static_cast<GrVkTexture*>(tex);
            } else {
                GrRenderTarget* rt = proxies[i]->peekRenderTarget();
                SkASSERT(rt);
                image = static_cast<GrVkRenderTarget*>(rt);
            }
            image->prepareForPresent(this);
        }
    }

    // Handle requests for preparing for external IO
    for (int i = 0; i < externalRequests.fNumImages; ++i) {
        SkImage* image = externalRequests.fImages[i];
        // Only texture-backed images have a Vulkan image to transition.
        if (!image->isTextureBacked()) {
            continue;
        }
        SkImage_GpuBase* gpuImage = static_cast<SkImage_GpuBase*>(as_IB(image));
        sk_sp<GrTextureProxy> proxy = gpuImage->asTextureProxyRef(this->getContext());
        SkASSERT(proxy);

        // Lazily instantiate the proxy; skip the image if instantiation fails.
        if (!proxy->isInstantiated()) {
            auto resourceProvider = this->getContext()->priv().resourceProvider();
            if (!proxy->instantiate(resourceProvider)) {
                continue;
            }
        }

        GrTexture* tex = proxy->peekTexture();
        if (!tex) {
            continue;
        }
        GrVkTexture* vkTex = static_cast<GrVkTexture*>(tex);
        vkTex->prepareForExternal(this);
    }
    for (int i = 0; i < externalRequests.fNumSurfaces; ++i) {
        SkSurface* surface = externalRequests.fSurfaces[i];
        // Skip surfaces that are not GPU-backed (no GrContext on the canvas).
        if (!surface->getCanvas()->getGrContext()) {
            continue;
        }
        SkSurface_Gpu* gpuSurface = static_cast<SkSurface_Gpu*>(surface);
        auto* rtc = gpuSurface->getDevice()->accessRenderTargetContext();
        sk_sp<GrRenderTargetProxy> proxy = rtc->asRenderTargetProxyRef();
        if (!proxy->isInstantiated()) {
            auto resourceProvider = this->getContext()->priv().resourceProvider();
            if (!proxy->instantiate(resourceProvider)) {
                continue;
            }
        }

        GrRenderTarget* rt = proxy->peekRenderTarget();
        SkASSERT(rt);
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(rt);
        // Per-surface choice between present and generic-external preparation.
        if (externalRequests.fPrepareSurfaceForPresent &&
            externalRequests.fPrepareSurfaceForPresent[i]) {
            vkRT->prepareForPresent(this);
        } else {
            vkRT->prepareForExternal(this);
        }
    }

    // kSyncCpu forces a blocking submit; otherwise the submit is asynchronous.
    // The finished proc/context are forwarded either way.
    if (info.fFlags & kSyncCpu_GrFlushFlag) {
        this->submitCommandBuffer(kForce_SyncQueue, info.fFinishedProc, info.fFinishedContext);
    } else {
        this->submitCommandBuffer(kSkip_SyncQueue, info.fFinishedProc, info.fFinishedContext);
    }
}
2135
Greg Daniel25af6712018-04-25 10:44:38 -04002136static int get_surface_sample_cnt(GrSurface* surf) {
2137 if (const GrRenderTarget* rt = surf->asRenderTarget()) {
Chris Dalton6ce447a2019-06-23 18:07:38 -06002138 return rt->numSamples();
egdaniel17b89252016-04-05 07:23:38 -07002139 }
Greg Daniel25af6712018-04-25 10:44:38 -04002140 return 0;
Greg Daniel164a9f02016-02-22 09:56:40 -05002141}
2142
// Copies srcRect from 'src' into 'dst' at 'dstPoint' using vkCmdCopyImage.
// Callers must have verified the copy is legal via GrVkCaps::canCopyImage
// (asserted below in debug builds). Both images are transitioned to transfer
// layouts before the copy is recorded.
void GrVkGpu::copySurfaceAsCopyImage(GrSurface* dst, GrSurface* src, GrVkImage* dstImage,
                                     GrVkImage* srcImage, const SkIRect& srcRect,
                                     const SkIPoint& dstPoint) {
#ifdef SK_DEBUG
    int dstSampleCnt = get_surface_sample_cnt(dst);
    int srcSampleCnt = get_surface_sample_cnt(src);
    bool dstHasYcbcr = dstImage->ycbcrConversionInfo().isValid();
    bool srcHasYcbcr = srcImage->ycbcrConversionInfo().isValid();
    SkASSERT(this->vkCaps().canCopyImage(dst->config(), dstSampleCnt, dstHasYcbcr,
                                         src->config(), srcSampleCnt, srcHasYcbcr));
#endif
    // Protected content may not be copied into unprotected memory.
    if (src->isProtected() && !dst->isProtected()) {
        SkDebugf("Can't copy from protected memory to non-protected");
        return;
    }

    // These flags are for flushing/invalidating caches and for the dst image it doesn't matter if
    // the cache is flushed since it is only being written to.
    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    srcImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             VK_ACCESS_TRANSFER_READ_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    // Copy base mip level (miplevel 0, array layer 0) of the color aspect only.
    VkImageCopy copyRegion;
    memset(&copyRegion, 0, sizeof(VkImageCopy));
    copyRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    copyRegion.srcOffset = { srcRect.fLeft, srcRect.fTop, 0 };
    copyRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    copyRegion.dstOffset = { dstPoint.fX, dstPoint.fY, 0 };
    copyRegion.extent = { (uint32_t)srcRect.width(), (uint32_t)srcRect.height(), 1 };

    fCurrentCmdBuffer->copyImage(this,
                                 srcImage,
                                 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 dstImage,
                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                 1,
                                 &copyRegion);

    SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY,
                                        srcRect.width(), srcRect.height());
    // The rect is already in device space so we pass in kTopLeft so no flip is done.
    this->didWriteToSurface(dst, kTopLeft_GrSurfaceOrigin, &dstRect);
}
2194
// Copies srcRect from 'src' into 'dst' at 'dstPoint' using vkCmdBlitImage.
// Callers must have verified the copy is legal via GrVkCaps::canCopyAsBlit
// (asserted below in debug builds). Both images are transitioned to transfer
// layouts before the blit is recorded.
void GrVkGpu::copySurfaceAsBlit(GrSurface* dst, GrSurface* src, GrVkImage* dstImage,
                                GrVkImage* srcImage, const SkIRect& srcRect,
                                const SkIPoint& dstPoint) {
#ifdef SK_DEBUG
    int dstSampleCnt = get_surface_sample_cnt(dst);
    int srcSampleCnt = get_surface_sample_cnt(src);
    bool dstHasYcbcr = dstImage->ycbcrConversionInfo().isValid();
    bool srcHasYcbcr = srcImage->ycbcrConversionInfo().isValid();
    SkASSERT(this->vkCaps().canCopyAsBlit(dst->config(), dstSampleCnt, dstImage->isLinearTiled(),
                                          dstHasYcbcr, src->config(), srcSampleCnt,
                                          srcImage->isLinearTiled(), srcHasYcbcr));

#endif
    // Protected content may not be copied into unprotected memory.
    if (src->isProtected() && !dst->isProtected()) {
        SkDebugf("Can't copy from protected memory to non-protected");
        return;
    }

    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    srcImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             VK_ACCESS_TRANSFER_READ_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    // Destination rect is the same size as srcRect, anchored at dstPoint
    // (no flip is performed here; the rects are already in device space).
    SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY, srcRect.width(),
                                        srcRect.height());

    VkImageBlit blitRegion;
    memset(&blitRegion, 0, sizeof(VkImageBlit));
    blitRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    blitRegion.srcOffsets[0] = { srcRect.fLeft, srcRect.fTop, 0 };
    blitRegion.srcOffsets[1] = { srcRect.fRight, srcRect.fBottom, 1 };
    blitRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    blitRegion.dstOffsets[0] = { dstRect.fLeft, dstRect.fTop, 0 };
    blitRegion.dstOffsets[1] = { dstRect.fRight, dstRect.fBottom, 1 };

    fCurrentCmdBuffer->blitImage(this,
                                 *srcImage,
                                 *dstImage,
                                 1,
                                 &blitRegion,
                                 VK_FILTER_NEAREST); // We never scale so any filter works here

    // The rect is already in device space so we pass in kTopLeft so no flip is done.
    this->didWriteToSurface(dst, kTopLeft_GrSurfaceOrigin, &dstRect);
}
2248
Greg Daniel46cfbc62019-06-07 11:43:30 -04002249void GrVkGpu::copySurfaceAsResolve(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
2250 const SkIPoint& dstPoint) {
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04002251 if (src->isProtected() && !dst->isProtected()) {
2252 SkDebugf("Can't copy from protected memory to non-protected");
2253 return;
2254 }
egdaniel4bcd62e2016-08-31 07:37:31 -07002255 GrVkRenderTarget* srcRT = static_cast<GrVkRenderTarget*>(src->asRenderTarget());
Brian Salomon1fabd512018-02-09 09:54:25 -05002256 this->resolveImage(dst, srcRT, srcRect, dstPoint);
Greg Daniel46cfbc62019-06-07 11:43:30 -04002257 SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY,
Greg Daniel1ba1bfc2018-06-21 13:55:19 -04002258 srcRect.width(), srcRect.height());
Greg Daniel46cfbc62019-06-07 11:43:30 -04002259 // The rect is already in device space so we pass in kTopLeft so no flip is done.
2260 this->didWriteToSurface(dst, kTopLeft_GrSurfaceOrigin, &dstRect);
egdaniel4bcd62e2016-08-31 07:37:31 -07002261}
2262
// Copies srcRect from 'src' into 'dst' at 'dstPoint', choosing the first legal
// mechanism in the order: MSAA resolve, vkCmdCopyImage, vkCmdBlitImage.
// Returns false if no mechanism can perform the copy (or it is disallowed,
// e.g. protected->unprotected or a secondary-command-buffer-wrapping target).
bool GrVkGpu::onCopySurface(GrSurface* dst, GrSurface* src, const SkIRect& srcRect,
                            const SkIPoint& dstPoint, bool canDiscardOutsideDstRect) {
#ifdef SK_DEBUG
    // Render targets wrapping secondary command buffers expose no VkImage to
    // copy to/from; debug-assert they never reach this path as src.
    if (GrVkRenderTarget* srcRT = static_cast<GrVkRenderTarget*>(src->asRenderTarget())) {
        SkASSERT(!srcRT->wrapsSecondaryCommandBuffer());
    }
    if (GrVkRenderTarget* dstRT = static_cast<GrVkRenderTarget*>(dst->asRenderTarget())) {
        SkASSERT(!dstRT->wrapsSecondaryCommandBuffer());
    }
#endif
    // Protected content may not be copied into unprotected memory.
    if (src->isProtected() && !dst->isProtected()) {
        SkDebugf("Can't copy from protected memory to non-protected");
        return false;
    }

    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();

    int dstSampleCnt = get_surface_sample_cnt(dst);
    int srcSampleCnt = get_surface_sample_cnt(src);

    // Pick the actual VkImage on each side: for multisampled render targets
    // that is the MSAA image, otherwise the render target or texture itself.
    GrVkImage* dstImage;
    GrVkImage* srcImage;
    GrRenderTarget* dstRT = dst->asRenderTarget();
    if (dstRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(dstRT);
        if (vkRT->wrapsSecondaryCommandBuffer()) {
            return false;
        }
        dstImage = vkRT->numSamples() > 1 ? vkRT->msaaImage() : vkRT;
    } else {
        SkASSERT(dst->asTexture());
        dstImage = static_cast<GrVkTexture*>(dst->asTexture());
    }
    GrRenderTarget* srcRT = src->asRenderTarget();
    if (srcRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(srcRT);
        srcImage = vkRT->numSamples() > 1 ? vkRT->msaaImage() : vkRT;
    } else {
        SkASSERT(src->asTexture());
        srcImage = static_cast<GrVkTexture*>(src->asTexture());
    }

    bool dstHasYcbcr = dstImage->ycbcrConversionInfo().isValid();
    bool srcHasYcbcr = srcImage->ycbcrConversionInfo().isValid();

    if (this->vkCaps().canCopyAsResolve(dstConfig, dstSampleCnt, dstHasYcbcr,
                                        srcConfig, srcSampleCnt, srcHasYcbcr)) {
        this->copySurfaceAsResolve(dst, src, srcRect, dstPoint);
        return true;
    }

    if (this->vkCaps().canCopyImage(dstConfig, dstSampleCnt, dstHasYcbcr,
                                    srcConfig, srcSampleCnt, srcHasYcbcr)) {
        this->copySurfaceAsCopyImage(dst, src, dstImage, srcImage, srcRect, dstPoint);
        return true;
    }

    if (this->vkCaps().canCopyAsBlit(dstConfig, dstSampleCnt, dstImage->isLinearTiled(),
                                     dstHasYcbcr, srcConfig, srcSampleCnt,
                                     srcImage->isLinearTiled(), srcHasYcbcr)) {
        this->copySurfaceAsBlit(dst, src, dstImage, srcImage, srcRect, dstPoint);
        return true;
    }

    return false;
}
2330
Brian Salomona6948702018-06-01 15:33:20 -04002331bool GrVkGpu::onReadPixels(GrSurface* surface, int left, int top, int width, int height,
2332 GrColorType dstColorType, void* buffer, size_t rowBytes) {
Emircan Uysaler23ca4e72019-06-24 10:53:09 -04002333 if (surface->isProtected()) {
2334 return false;
2335 }
2336
Brian Salomonc320b152018-02-20 14:05:36 -05002337 if (GrPixelConfigToColorType(surface->config()) != dstColorType) {
Greg Daniel164a9f02016-02-22 09:56:40 -05002338 return false;
2339 }
2340
egdaniel66933552016-08-24 07:22:19 -07002341 GrVkImage* image = nullptr;
2342 GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(surface->asRenderTarget());
2343 if (rt) {
Greg Danielbe7fc462019-01-03 16:40:42 -05002344 // Reading from render targets that wrap a secondary command buffer is not allowed since
2345 // it would require us to know the VkImage, which we don't have, as well as need us to
2346 // stop and start the VkRenderPass which we don't have access to.
2347 if (rt->wrapsSecondaryCommandBuffer()) {
2348 return false;
2349 }
egdaniel66933552016-08-24 07:22:19 -07002350 // resolve the render target if necessary
2351 switch (rt->getResolveType()) {
2352 case GrVkRenderTarget::kCantResolve_ResolveType:
2353 return false;
2354 case GrVkRenderTarget::kAutoResolves_ResolveType:
2355 break;
2356 case GrVkRenderTarget::kCanResolve_ResolveType:
Greg Daniel0a77f432018-12-06 11:23:32 -05002357 this->resolveRenderTargetNoFlush(rt);
egdaniel66933552016-08-24 07:22:19 -07002358 break;
2359 default:
Ben Wagnerb4aab9a2017-08-16 10:53:04 -04002360 SK_ABORT("Unknown resolve type");
egdaniel66933552016-08-24 07:22:19 -07002361 }
2362 image = rt;
2363 } else {
2364 image = static_cast<GrVkTexture*>(surface->asTexture());
2365 }
2366
2367 if (!image) {
Greg Daniel164a9f02016-02-22 09:56:40 -05002368 return false;
2369 }
2370
Greg Daniel475eb702018-09-28 14:16:50 -04002371 // Skia's RGB_888x color type, which we map to the vulkan R8G8B8_UNORM, expects the data to be
2372 // 32 bits, but the Vulkan format is only 24. So we first copy the surface into an R8G8B8A8
2373 // image and then do the read pixels from that.
2374 sk_sp<GrVkTextureRenderTarget> copySurface;
Greg Danielf259b8b2019-02-14 09:03:43 -05002375 if (dstColorType == GrColorType::kRGB_888x && image->imageFormat() == VK_FORMAT_R8G8B8_UNORM) {
2376 SkASSERT(surface->config() == kRGB_888_GrPixelConfig);
Greg Daniel475eb702018-09-28 14:16:50 -04002377
Greg Daniel46cfbc62019-06-07 11:43:30 -04002378 int srcSampleCount = 0;
2379 if (rt) {
Chris Dalton6ce447a2019-06-23 18:07:38 -06002380 srcSampleCount = rt->numSamples();
Greg Daniel46cfbc62019-06-07 11:43:30 -04002381 }
2382 bool srcHasYcbcr = image->ycbcrConversionInfo().isValid();
2383 if (!this->vkCaps().canCopyAsBlit(kRGBA_8888_GrPixelConfig, 1, false, false,
2384 surface->config(), srcSampleCount, image->isLinearTiled(),
2385 srcHasYcbcr)) {
2386 return false;
2387 }
2388
Greg Daniel475eb702018-09-28 14:16:50 -04002389 // Make a new surface that is RGBA to copy the RGB surface into.
2390 GrSurfaceDesc surfDesc;
2391 surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
2392 surfDesc.fWidth = width;
2393 surfDesc.fHeight = height;
2394 surfDesc.fConfig = kRGBA_8888_GrPixelConfig;
2395 surfDesc.fSampleCnt = 1;
2396
2397 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
2398 VK_IMAGE_USAGE_SAMPLED_BIT |
2399 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
2400 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2401
2402 GrVkImage::ImageDesc imageDesc;
2403 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
2404 imageDesc.fFormat = VK_FORMAT_R8G8B8A8_UNORM;
2405 imageDesc.fWidth = width;
2406 imageDesc.fHeight = height;
2407 imageDesc.fLevels = 1;
2408 imageDesc.fSamples = 1;
2409 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
2410 imageDesc.fUsageFlags = usageFlags;
2411 imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2412
2413 copySurface = GrVkTextureRenderTarget::MakeNewTextureRenderTarget(
2414 this, SkBudgeted::kYes, surfDesc, imageDesc, GrMipMapsStatus::kNotAllocated);
2415 if (!copySurface) {
2416 return false;
2417 }
2418
Greg Daniel475eb702018-09-28 14:16:50 -04002419 SkIRect srcRect = SkIRect::MakeXYWH(left, top, width, height);
Greg Daniel46cfbc62019-06-07 11:43:30 -04002420 SkAssertResult(this->copySurface(copySurface.get(), surface, srcRect, SkIPoint::Make(0,0)));
2421
Greg Daniel475eb702018-09-28 14:16:50 -04002422 top = 0;
2423 left = 0;
2424 dstColorType = GrColorType::kRGBA_8888;
2425 image = copySurface.get();
2426 }
2427
Greg Daniel164a9f02016-02-22 09:56:40 -05002428 // Change layout of our target so it can be used as copy
egdaniel66933552016-08-24 07:22:19 -07002429 image->setImageLayout(this,
2430 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2431 VK_ACCESS_TRANSFER_READ_BIT,
2432 VK_PIPELINE_STAGE_TRANSFER_BIT,
2433 false);
Greg Daniel164a9f02016-02-22 09:56:40 -05002434
Brian Salomonc320b152018-02-20 14:05:36 -05002435 int bpp = GrColorTypeBytesPerPixel(dstColorType);
egdaniel6fa0a912016-09-12 11:51:29 -07002436 size_t tightRowBytes = bpp * width;
Greg Daniel164a9f02016-02-22 09:56:40 -05002437
Greg Daniel164a9f02016-02-22 09:56:40 -05002438 VkBufferImageCopy region;
2439 memset(&region, 0, sizeof(VkBufferImageCopy));
egdaniel6fa0a912016-09-12 11:51:29 -07002440
2441 bool copyFromOrigin = this->vkCaps().mustDoCopiesFromOrigin();
2442 if (copyFromOrigin) {
2443 region.imageOffset = { 0, 0, 0 };
Brian Salomona6948702018-06-01 15:33:20 -04002444 region.imageExtent = { (uint32_t)(left + width), (uint32_t)(top + height), 1 };
egdaniel6fa0a912016-09-12 11:51:29 -07002445 } else {
Brian Salomona6948702018-06-01 15:33:20 -04002446 VkOffset3D offset = { left, top, 0 };
egdaniel6fa0a912016-09-12 11:51:29 -07002447 region.imageOffset = offset;
2448 region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
2449 }
2450
2451 size_t transBufferRowBytes = bpp * region.imageExtent.width;
Greg Daniel386a9b62018-07-03 10:52:30 -04002452 size_t imageRows = region.imageExtent.height;
Brian Salomon12d22642019-01-29 14:38:50 -05002453 auto transferBuffer = sk_sp<GrVkTransferBuffer>(
Greg Daniel3cdfa092018-02-26 16:14:10 -05002454 static_cast<GrVkTransferBuffer*>(this->createBuffer(transBufferRowBytes * imageRows,
Brian Salomonae64c192019-02-05 09:41:37 -05002455 GrGpuBufferType::kXferGpuToCpu,
Brian Salomon12d22642019-01-29 14:38:50 -05002456 kStream_GrAccessPattern)
2457 .release()));
egdaniel6fa0a912016-09-12 11:51:29 -07002458
2459 // Copy the image to a buffer so we can map it to cpu memory
jvanverthdb379092016-07-07 11:18:46 -07002460 region.bufferOffset = transferBuffer->offset();
egdaniel88e8aef2016-06-27 14:34:55 -07002461 region.bufferRowLength = 0; // Forces RowLength to be width. We handle the rowBytes below.
Greg Daniel164a9f02016-02-22 09:56:40 -05002462 region.bufferImageHeight = 0; // Forces height to be tightly packed. Only useful for 3d images.
2463 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
Greg Daniel164a9f02016-02-22 09:56:40 -05002464
2465 fCurrentCmdBuffer->copyImageToBuffer(this,
egdaniel66933552016-08-24 07:22:19 -07002466 image,
Greg Daniel164a9f02016-02-22 09:56:40 -05002467 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
Brian Salomon12d22642019-01-29 14:38:50 -05002468 transferBuffer.get(),
Greg Daniel164a9f02016-02-22 09:56:40 -05002469 1,
2470 &region);
2471
2472 // make sure the copy to buffer has finished
2473 transferBuffer->addMemoryBarrier(this,
2474 VK_ACCESS_TRANSFER_WRITE_BIT,
2475 VK_ACCESS_HOST_READ_BIT,
2476 VK_PIPELINE_STAGE_TRANSFER_BIT,
2477 VK_PIPELINE_STAGE_HOST_BIT,
2478 false);
2479
2480 // We need to submit the current command buffer to the Queue and make sure it finishes before
2481 // we can copy the data out of the buffer.
2482 this->submitCommandBuffer(kForce_SyncQueue);
Greg Daniel88fdee92018-02-24 22:41:50 +00002483 void* mappedMemory = transferBuffer->map();
Greg Daniele35a99e2018-03-02 11:44:22 -05002484 const GrVkAlloc& transAlloc = transferBuffer->alloc();
Greg Daniel81df0412018-05-31 13:13:33 -04002485 GrVkMemory::InvalidateMappedAlloc(this, transAlloc, 0, transAlloc.fSize);
Greg Daniel164a9f02016-02-22 09:56:40 -05002486
egdaniel6fa0a912016-09-12 11:51:29 -07002487 if (copyFromOrigin) {
2488 uint32_t skipRows = region.imageExtent.height - height;
2489 mappedMemory = (char*)mappedMemory + transBufferRowBytes * skipRows + bpp * left;
2490 }
2491
Brian Salomona6948702018-06-01 15:33:20 -04002492 SkRectMemcpy(buffer, rowBytes, mappedMemory, transBufferRowBytes, tightRowBytes, height);
Greg Daniel164a9f02016-02-22 09:56:40 -05002493
2494 transferBuffer->unmap();
Greg Daniel164a9f02016-02-22 09:56:40 -05002495 return true;
2496}
egdaniel066df7c2016-06-08 14:02:27 -07002497
egdaniel27bb2842016-07-07 11:58:35 -07002498// The RenderArea bounds we pass into BeginRenderPass must have a start x value that is a multiple
2499// of the granularity. The width must also be a multiple of the granularity or eaqual to the width
2500// the the entire attachment. Similar requirements for the y and height components.
2501void adjust_bounds_to_granularity(SkIRect* dstBounds, const SkIRect& srcBounds,
2502 const VkExtent2D& granularity, int maxWidth, int maxHeight) {
2503 // Adjust Width
egdanield5797b32016-09-20 12:57:45 -07002504 if ((0 != granularity.width && 1 != granularity.width)) {
2505 // Start with the right side of rect so we know if we end up going pass the maxWidth.
2506 int rightAdj = srcBounds.fRight % granularity.width;
2507 if (rightAdj != 0) {
2508 rightAdj = granularity.width - rightAdj;
2509 }
2510 dstBounds->fRight = srcBounds.fRight + rightAdj;
2511 if (dstBounds->fRight > maxWidth) {
2512 dstBounds->fRight = maxWidth;
2513 dstBounds->fLeft = 0;
2514 } else {
2515 dstBounds->fLeft = srcBounds.fLeft - srcBounds.fLeft % granularity.width;
2516 }
egdaniel27bb2842016-07-07 11:58:35 -07002517 } else {
egdanield5797b32016-09-20 12:57:45 -07002518 dstBounds->fLeft = srcBounds.fLeft;
2519 dstBounds->fRight = srcBounds.fRight;
egdaniel27bb2842016-07-07 11:58:35 -07002520 }
2521
2522 // Adjust height
egdanield5797b32016-09-20 12:57:45 -07002523 if ((0 != granularity.height && 1 != granularity.height)) {
2524 // Start with the bottom side of rect so we know if we end up going pass the maxHeight.
2525 int bottomAdj = srcBounds.fBottom % granularity.height;
2526 if (bottomAdj != 0) {
2527 bottomAdj = granularity.height - bottomAdj;
2528 }
2529 dstBounds->fBottom = srcBounds.fBottom + bottomAdj;
2530 if (dstBounds->fBottom > maxHeight) {
2531 dstBounds->fBottom = maxHeight;
2532 dstBounds->fTop = 0;
2533 } else {
2534 dstBounds->fTop = srcBounds.fTop - srcBounds.fTop % granularity.height;
2535 }
egdaniel27bb2842016-07-07 11:58:35 -07002536 } else {
egdanield5797b32016-09-20 12:57:45 -07002537 dstBounds->fTop = srcBounds.fTop;
2538 dstBounds->fBottom = srcBounds.fBottom;
egdaniel27bb2842016-07-07 11:58:35 -07002539 }
2540}
2541
// Replays a set of pre-recorded secondary command buffers inside a single render pass on
// 'target'. 'bounds' is given with respect to 'origin'; it is flipped into Vulkan's top-left
// coordinate space if needed and then expanded to the render pass's granularity before being
// used as the render area.
void GrVkGpu::submitSecondaryCommandBuffer(const SkTArray<GrVkSecondaryCommandBuffer*>& buffers,
                                           const GrVkRenderPass* renderPass,
                                           const VkClearValue* colorClear,
                                           GrVkRenderTarget* target, GrSurfaceOrigin origin,
                                           const SkIRect& bounds) {
    // Targets that wrap an externally supplied secondary command buffer must not begin their
    // own render pass here.
    SkASSERT (!target->wrapsSecondaryCommandBuffer());
    const SkIRect* pBounds = &bounds;
    SkIRect flippedBounds;
    if (kBottomLeft_GrSurfaceOrigin == origin) {
        // Flip the rect vertically: Vulkan's framebuffer origin is top-left.
        flippedBounds = bounds;
        flippedBounds.fTop = target->height() - bounds.fBottom;
        flippedBounds.fBottom = target->height() - bounds.fTop;
        pBounds = &flippedBounds;
    }

    // The bounds we use for the render pass should be of the granularity supported
    // by the device.
    const VkExtent2D& granularity = renderPass->granularity();
    SkIRect adjustedBounds;
    if ((0 != granularity.width && 1 != granularity.width) ||
        (0 != granularity.height && 1 != granularity.height)) {
        adjust_bounds_to_granularity(&adjustedBounds, *pBounds, granularity,
                                     target->width(), target->height());
        pBounds = &adjustedBounds;
    }

#ifdef SK_DEBUG
    // The clear array below assumes the color attachment is at index 0 and the stencil
    // attachment, when present, at index 1; verify that layout.
    uint32_t index;
    bool result = renderPass->colorAttachmentIndex(&index);
    SkASSERT(result && 0 == index);
    result = renderPass->stencilAttachmentIndex(&index);
    if (result) {
        SkASSERT(1 == index);
    }
#endif
    VkClearValue clears[2];
    clears[0].color = colorClear->color;
    clears[1].depthStencil.depth = 0.0f;
    clears[1].depthStencil.stencil = 0;

    // Begin the pass (the trailing 'true' presumably selects secondary-command-buffer contents —
    // confirm against GrVkCommandBuffer::beginRenderPass), replay each buffer, then end the pass.
    fCurrentCmdBuffer->beginRenderPass(this, renderPass, clears, *target, *pBounds, true);
    for (int i = 0; i < buffers.count(); ++i) {
        fCurrentCmdBuffer->executeCommands(this, buffers[i]);
    }
    fCurrentCmdBuffer->endRenderPass(this);

    // Record the write using the caller's original (un-adjusted) bounds.
    this->didWriteToSurface(target, origin, &bounds);
}
egdaniel9cb63402016-06-23 08:37:05 -07002590
Robert Phillips5b5d84c2018-08-09 15:12:18 -04002591void GrVkGpu::submit(GrGpuCommandBuffer* buffer) {
2592 if (buffer->asRTCommandBuffer()) {
2593 SkASSERT(fCachedRTCommandBuffer.get() == buffer);
2594
2595 fCachedRTCommandBuffer->submit();
2596 fCachedRTCommandBuffer->reset();
2597 } else {
2598 SkASSERT(fCachedTexCommandBuffer.get() == buffer);
2599
2600 fCachedTexCommandBuffer->submit();
2601 fCachedTexCommandBuffer->reset();
2602 }
2603}
2604
Greg Daniel6be35232017-03-01 17:01:09 -05002605GrFence SK_WARN_UNUSED_RESULT GrVkGpu::insertFence() {
jvanverth84741b32016-09-30 08:39:02 -07002606 VkFenceCreateInfo createInfo;
2607 memset(&createInfo, 0, sizeof(VkFenceCreateInfo));
2608 createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
2609 createInfo.pNext = nullptr;
2610 createInfo.flags = 0;
2611 VkFence fence = VK_NULL_HANDLE;
Greg Daniel6be35232017-03-01 17:01:09 -05002612
2613 VK_CALL_ERRCHECK(CreateFence(this->device(), &createInfo, nullptr, &fence));
2614 VK_CALL(QueueSubmit(this->queue(), 0, nullptr, fence));
2615
2616 GR_STATIC_ASSERT(sizeof(GrFence) >= sizeof(VkFence));
jvanverth84741b32016-09-30 08:39:02 -07002617 return (GrFence)fence;
2618}
2619
Greg Daniel6be35232017-03-01 17:01:09 -05002620bool GrVkGpu::waitFence(GrFence fence, uint64_t timeout) {
2621 SkASSERT(VK_NULL_HANDLE != (VkFence)fence);
2622
2623 VkResult result = VK_CALL(WaitForFences(this->device(), 1, (VkFence*)&fence, VK_TRUE, timeout));
jvanverth84741b32016-09-30 08:39:02 -07002624 return (VK_SUCCESS == result);
2625}
2626
2627void GrVkGpu::deleteFence(GrFence fence) const {
Greg Daniel6be35232017-03-01 17:01:09 -05002628 VK_CALL(DestroyFence(this->device(), (VkFence)fence, nullptr));
2629}
2630
Greg Daniela5cb7812017-06-16 09:45:32 -04002631sk_sp<GrSemaphore> SK_WARN_UNUSED_RESULT GrVkGpu::makeSemaphore(bool isOwned) {
2632 return GrVkSemaphore::Make(this, isOwned);
Greg Daniel6be35232017-03-01 17:01:09 -05002633}
2634
Greg Daniel48661b82018-01-22 16:11:35 -05002635sk_sp<GrSemaphore> GrVkGpu::wrapBackendSemaphore(const GrBackendSemaphore& semaphore,
2636 GrResourceProvider::SemaphoreWrapType wrapType,
2637 GrWrapOwnership ownership) {
2638 return GrVkSemaphore::MakeWrapped(this, semaphore.vkSemaphore(), wrapType, ownership);
Greg Daniela5cb7812017-06-16 09:45:32 -04002639}
2640
Greg Daniel858e12c2018-12-06 11:11:37 -05002641void GrVkGpu::insertSemaphore(sk_sp<GrSemaphore> semaphore) {
Greg Daniel6be35232017-03-01 17:01:09 -05002642 GrVkSemaphore* vkSem = static_cast<GrVkSemaphore*>(semaphore.get());
2643
Greg Daniel48661b82018-01-22 16:11:35 -05002644 GrVkSemaphore::Resource* resource = vkSem->getResource();
2645 if (resource->shouldSignal()) {
Greg Daniel17b7c052018-01-09 13:55:33 -05002646 resource->ref();
2647 fSemaphoresToSignal.push_back(resource);
2648 }
Greg Daniel6be35232017-03-01 17:01:09 -05002649}
2650
Greg Daniel48661b82018-01-22 16:11:35 -05002651void GrVkGpu::waitSemaphore(sk_sp<GrSemaphore> semaphore) {
Greg Daniel6be35232017-03-01 17:01:09 -05002652 GrVkSemaphore* vkSem = static_cast<GrVkSemaphore*>(semaphore.get());
2653
Greg Daniel48661b82018-01-22 16:11:35 -05002654 GrVkSemaphore::Resource* resource = vkSem->getResource();
2655 if (resource->shouldWait()) {
2656 resource->ref();
2657 fSemaphoresToWaitOn.push_back(resource);
2658 }
jvanverth84741b32016-09-30 08:39:02 -07002659}
Brian Osman13dddce2017-05-09 13:19:50 -04002660
// Transitions 'texture' into a shader-readable layout and submits the current command buffer so
// another context can safely sample it. Always returns nullptr (see comment below).
sk_sp<GrSemaphore> GrVkGpu::prepareTextureForCrossContextUsage(GrTexture* texture) {
    SkASSERT(texture);
    GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
    // Move the image to SHADER_READ_ONLY_OPTIMAL, readable from the fragment-shader stage.
    vkTexture->setImageLayout(this,
                              VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                              VK_ACCESS_SHADER_READ_BIT,
                              VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                              false);
    // Submit without forcing a sync; the layout-transition barrier recorded above is enough.
    this->submitCommandBuffer(kSkip_SyncQueue);

    // The image layout change serves as a barrier, so no semaphore is needed.
    // If we ever decide we need to return a semaphore here, we need to make sure GrVkSemaphore is
    // thread safe so that only the first thread that tries to use the semaphore actually submits
    // it. This additionally would also require thread safety in command buffer submissions to
    // queues in general.
    return nullptr;
}
Greg Danielf5d87582017-12-18 14:48:15 -05002678
Greg Daniel64cc9aa2018-10-19 13:54:56 -04002679void GrVkGpu::addDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
2680 fDrawables.emplace_back(std::move(drawable));
2681}
2682
Greg Daniel7a82edf2018-12-04 10:54:34 -05002683uint32_t GrVkGpu::getExtraSamplerKeyForProgram(const GrSamplerState& samplerState,
2684 const GrBackendFormat& format) {
2685 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
2686 SkASSERT(ycbcrInfo);
2687 if (!ycbcrInfo->isValid()) {
2688 return 0;
2689 }
2690
2691 const GrVkSampler* sampler = this->resourceProvider().findOrCreateCompatibleSampler(
2692 samplerState, *ycbcrInfo);
2693
2694 return sampler->uniqueID();
2695}
2696
Greg Daniela870b462019-01-08 15:49:46 -05002697void GrVkGpu::storeVkPipelineCacheData() {
Robert Phillips9da87e02019-02-04 13:26:26 -05002698 if (this->getContext()->priv().getPersistentCache()) {
Greg Daniela870b462019-01-08 15:49:46 -05002699 this->resourceProvider().storePipelineCacheData();
2700 }
2701}