blob: f2bdb74f2697cf9f7f5cf390dbf59c64cf12463a [file] [log] [blame]
Chia-I Wu525c6602014-08-27 10:22:34 +08001/*
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -06002 * Vulkan
Chia-I Wu525c6602014-08-27 10:22:34 +08003 *
4 * Copyright (C) 2014 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
Chia-I Wu44e42362014-09-02 08:32:09 +080023 *
24 * Authors:
25 * Chia-I Wu <olv@lunarg.com>
Chia-I Wu525c6602014-08-27 10:22:34 +080026 */
27
28#include "genhw/genhw.h"
29#include "img.h"
Chia-I Wu714df452015-01-01 07:55:04 +080030#include "buf.h"
Chia-I Wu525c6602014-08-27 10:22:34 +080031#include "cmd_priv.h"
32
/* Kinds of access a given VkImageLayout permits on an image.
 * HIZ_OP marks layouts that use the hierarchical-Z buffer, so a
 * transition in or out of such a layout may need a depth resolve.
 */
enum {
    READ_OP          = 1 << 0,
    WRITE_OP         = 1 << 1,
    HIZ_OP           = 1 << 2,
};

/* Hardware caches that may hold data for an image in a given layout.
 * Used to decide which PIPE_CONTROL flush/invalidate bits are needed
 * when an image transitions between layouts.
 */
enum {
    MEM_CACHE        = 1 << 0,
    DATA_READ_CACHE  = 1 << 1,
    DATA_WRITE_CACHE = 1 << 2,
    RENDER_CACHE     = 1 << 3,
    SAMPLER_CACHE    = 1 << 4,
};
46
Chia-I Wuc45db532015-02-19 11:20:38 -070047static uint32_t img_get_layout_ops(const struct intel_img *img,
Courtney Goeltzenleuchter382489d2015-04-10 08:34:15 -060048 VkImageLayout layout)
Chia-I Wuc45db532015-02-19 11:20:38 -070049{
50 uint32_t ops;
51
Chia-I Wu5b66aa52015-04-16 22:02:10 +080052 switch ((int) layout) {
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060053 case VK_IMAGE_LAYOUT_GENERAL:
Ian Elliott338dedb2015-08-21 15:09:33 -060054 case VK_IMAGE_LAYOUT_PRESENT_SOURCE_KHR:
Chia-I Wuc45db532015-02-19 11:20:38 -070055 ops = READ_OP | WRITE_OP;
56 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060057 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
Chia-I Wuc45db532015-02-19 11:20:38 -070058 ops = READ_OP | WRITE_OP;
59 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060060 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
Chia-I Wuc45db532015-02-19 11:20:38 -070061 ops = READ_OP | WRITE_OP | HIZ_OP;
62 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060063 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
Chia-I Wuc45db532015-02-19 11:20:38 -070064 ops = READ_OP | HIZ_OP;
65 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060066 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
Chia-I Wuc45db532015-02-19 11:20:38 -070067 ops = READ_OP;
68 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060069 case VK_IMAGE_LAYOUT_TRANSFER_SOURCE_OPTIMAL:
Chia-I Wuc45db532015-02-19 11:20:38 -070070 ops = READ_OP;
71 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060072 case VK_IMAGE_LAYOUT_TRANSFER_DESTINATION_OPTIMAL:
Chia-I Wuc45db532015-02-19 11:20:38 -070073 ops = WRITE_OP;
74 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060075 case VK_IMAGE_LAYOUT_UNDEFINED:
Chia-I Wuc45db532015-02-19 11:20:38 -070076 default:
77 ops = 0;
78 break;
79 }
80
81 return ops;
82}
83
Mike Stroyan55658c22014-12-04 11:08:39 +000084static uint32_t img_get_layout_caches(const struct intel_img *img,
Courtney Goeltzenleuchter382489d2015-04-10 08:34:15 -060085 VkImageLayout layout)
Chia-I Wu525c6602014-08-27 10:22:34 +080086{
87 uint32_t caches;
88
Chia-I Wu5b66aa52015-04-16 22:02:10 +080089 switch ((int) layout) {
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060090 case VK_IMAGE_LAYOUT_GENERAL:
Ian Elliott338dedb2015-08-21 15:09:33 -060091 case VK_IMAGE_LAYOUT_PRESENT_SOURCE_KHR:
Mike Stroyan55658c22014-12-04 11:08:39 +000092 // General layout when image can be used for any kind of access
93 caches = MEM_CACHE | DATA_READ_CACHE | DATA_WRITE_CACHE | RENDER_CACHE | SAMPLER_CACHE;
Chia-I Wub5c1cdf2014-11-22 03:17:45 +080094 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060095 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
Mike Stroyan55658c22014-12-04 11:08:39 +000096 // Optimal layout when image is only used for color attachment read/write
97 caches = DATA_WRITE_CACHE | RENDER_CACHE;
Chia-I Wu525c6602014-08-27 10:22:34 +080098 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -060099 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
Mike Stroyan55658c22014-12-04 11:08:39 +0000100 // Optimal layout when image is only used for depth/stencil attachment read/write
101 caches = DATA_WRITE_CACHE | RENDER_CACHE;
Chia-I Wu525c6602014-08-27 10:22:34 +0800102 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600103 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
Mike Stroyan55658c22014-12-04 11:08:39 +0000104 // Optimal layout when image is used for read only depth/stencil attachment and shader access
Chia-I Wu525c6602014-08-27 10:22:34 +0800105 caches = RENDER_CACHE;
106 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600107 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
Mike Stroyan55658c22014-12-04 11:08:39 +0000108 // Optimal layout when image is used for read only shader access
109 caches = DATA_READ_CACHE | SAMPLER_CACHE;
110 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600111 case VK_IMAGE_LAYOUT_TRANSFER_SOURCE_OPTIMAL:
Mike Stroyan55658c22014-12-04 11:08:39 +0000112 // Optimal layout when image is used only as source of transfer operations
113 caches = MEM_CACHE | DATA_READ_CACHE | RENDER_CACHE | SAMPLER_CACHE;
114 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600115 case VK_IMAGE_LAYOUT_TRANSFER_DESTINATION_OPTIMAL:
Mike Stroyan55658c22014-12-04 11:08:39 +0000116 // Optimal layout when image is used only as destination of transfer operations
117 caches = MEM_CACHE | DATA_WRITE_CACHE | RENDER_CACHE;
Chia-I Wu525c6602014-08-27 10:22:34 +0800118 break;
119 default:
120 caches = 0;
121 break;
122 }
123
124 return caches;
125}
126
Chia-I Wuc45db532015-02-19 11:20:38 -0700127static void cmd_resolve_depth(struct intel_cmd *cmd,
128 struct intel_img *img,
Courtney Goeltzenleuchter382489d2015-04-10 08:34:15 -0600129 VkImageLayout old_layout,
130 VkImageLayout new_layout,
131 const VkImageSubresourceRange *range)
Chia-I Wuc45db532015-02-19 11:20:38 -0700132{
133 const uint32_t old_ops = img_get_layout_ops(img, old_layout);
134 const uint32_t new_ops = img_get_layout_ops(img, new_layout);
135
136 if (old_ops & WRITE_OP) {
137 if ((old_ops & HIZ_OP) && !(new_ops & HIZ_OP))
138 cmd_meta_ds_op(cmd, INTEL_CMD_META_DS_RESOLVE, img, range);
139 else if (!(old_ops & HIZ_OP) && (new_ops & HIZ_OP))
140 cmd_meta_ds_op(cmd, INTEL_CMD_META_DS_HIZ_RESOLVE, img, range);
141 }
142}
143
Chia-I Wub5c1cdf2014-11-22 03:17:45 +0800144static uint32_t cmd_get_flush_flags(const struct intel_cmd *cmd,
145 uint32_t old_caches,
146 uint32_t new_caches,
147 bool is_ds)
Chia-I Wu525c6602014-08-27 10:22:34 +0800148{
149 uint32_t flags = 0;
150
151 /* not dirty */
152 if (!(old_caches & (MEM_CACHE | RENDER_CACHE | DATA_WRITE_CACHE)))
153 return 0;
154
155 if ((old_caches & RENDER_CACHE) && (new_caches & ~RENDER_CACHE)) {
Chia-I Wub5c1cdf2014-11-22 03:17:45 +0800156 if (is_ds)
Chia-I Wu525c6602014-08-27 10:22:34 +0800157 flags |= GEN6_PIPE_CONTROL_DEPTH_CACHE_FLUSH;
158 else
159 flags |= GEN6_PIPE_CONTROL_RENDER_CACHE_FLUSH;
160 }
161
162 if ((old_caches & DATA_WRITE_CACHE) &&
163 (new_caches & ~(DATA_READ_CACHE | DATA_WRITE_CACHE))) {
164 if (cmd_gen(cmd) >= INTEL_GEN(7))
Chia-I Wu97aa4de2015-03-05 15:43:16 -0700165 flags |= GEN7_PIPE_CONTROL_DC_FLUSH;
Chia-I Wu525c6602014-08-27 10:22:34 +0800166 }
167
168 if (new_caches & SAMPLER_CACHE)
169 flags |= GEN6_PIPE_CONTROL_TEXTURE_CACHE_INVALIDATE;
170
171 if ((new_caches & DATA_READ_CACHE) && old_caches != DATA_WRITE_CACHE)
172 flags |= GEN6_PIPE_CONTROL_CONSTANT_CACHE_INVALIDATE;
173
174 if (!flags)
175 return 0;
176
177 flags |= GEN6_PIPE_CONTROL_CS_STALL;
178
179 return flags;
180}
181
Mike Stroyan55658c22014-12-04 11:08:39 +0000182static void cmd_memory_barriers(struct intel_cmd *cmd,
Courtney Goeltzenleuchteraeffeae2015-09-10 17:58:54 -0600183 uint32_t flush_flags,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -0600184 uint32_t memory_barrier_count,
Courtney Goeltzenleuchter73a21d32015-07-12 13:20:05 -0600185 const void* const* memory_barriers)
Chia-I Wu525c6602014-08-27 10:22:34 +0800186{
Mike Stroyan55658c22014-12-04 11:08:39 +0000187 uint32_t i;
Courtney Goeltzenleuchter382489d2015-04-10 08:34:15 -0600188 VkFlags input_mask = 0;
189 VkFlags output_mask = 0;
Chia-I Wu525c6602014-08-27 10:22:34 +0800190
Mike Stroyan55658c22014-12-04 11:08:39 +0000191 for (i = 0; i < memory_barrier_count; i++) {
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600192
193 const union {
Courtney Goeltzenleuchter382489d2015-04-10 08:34:15 -0600194 VkStructureType type;
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600195
Courtney Goeltzenleuchter382489d2015-04-10 08:34:15 -0600196 VkMemoryBarrier mem;
197 VkBufferMemoryBarrier buf;
198 VkImageMemoryBarrier img;
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600199 } *u = memory_barriers[i];
200
201 switch(u->type)
Mike Stroyan55658c22014-12-04 11:08:39 +0000202 {
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600203 case VK_STRUCTURE_TYPE_MEMORY_BARRIER:
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600204 output_mask |= u->mem.outputMask;
205 input_mask |= u->mem.inputMask;
Mike Stroyan55658c22014-12-04 11:08:39 +0000206 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600207 case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER:
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600208 output_mask |= u->buf.outputMask;
209 input_mask |= u->buf.inputMask;
Mike Stroyan55658c22014-12-04 11:08:39 +0000210 break;
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600211 case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600212 output_mask |= u->img.outputMask;
213 input_mask |= u->img.inputMask;
Mike Stroyan55658c22014-12-04 11:08:39 +0000214 {
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600215 struct intel_img *img = intel_img(u->img.image);
Chia-I Wuc45db532015-02-19 11:20:38 -0700216
217 cmd_resolve_depth(cmd, img, u->img.oldLayout,
218 u->img.newLayout, &u->img.subresourceRange);
219
Mike Stroyan55658c22014-12-04 11:08:39 +0000220 flush_flags |= cmd_get_flush_flags(cmd,
Mark Lobodzinskid3eabd72015-01-29 14:24:14 -0600221 img_get_layout_caches(img, u->img.oldLayout),
222 img_get_layout_caches(img, u->img.newLayout),
Jeremy Hayes2b7e88a2015-01-23 08:51:43 -0700223 icd_format_is_ds(img->layout.format));
Mike Stroyan55658c22014-12-04 11:08:39 +0000224 }
225 break;
226 default:
227 break;
228 }
Chia-I Wu525c6602014-08-27 10:22:34 +0800229 }
230
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600231 if (output_mask & VK_MEMORY_OUTPUT_SHADER_WRITE_BIT) {
Chia-I Wu97aa4de2015-03-05 15:43:16 -0700232 flush_flags |= GEN7_PIPE_CONTROL_DC_FLUSH;
Mike Stroyan55658c22014-12-04 11:08:39 +0000233 }
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600234 if (output_mask & VK_MEMORY_OUTPUT_COLOR_ATTACHMENT_BIT) {
Mike Stroyan55658c22014-12-04 11:08:39 +0000235 flush_flags |= GEN6_PIPE_CONTROL_RENDER_CACHE_FLUSH;
236 }
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600237 if (output_mask & VK_MEMORY_OUTPUT_DEPTH_STENCIL_ATTACHMENT_BIT) {
Mike Stroyan55658c22014-12-04 11:08:39 +0000238 flush_flags |= GEN6_PIPE_CONTROL_DEPTH_CACHE_FLUSH;
239 }
240
Courtney Goeltzenleuchtera569a502015-04-29 17:16:21 -0600241 /* CPU write is cache coherent, so VK_MEMORY_OUTPUT_HOST_WRITE_BIT needs no flush. */
Courtney Goeltzenleuchterad870812015-04-15 15:29:59 -0600242 /* Meta handles flushes, so VK_MEMORY_OUTPUT_TRANSFER_BIT needs no flush. */
Mike Stroyan55658c22014-12-04 11:08:39 +0000243
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600244 if (input_mask & (VK_MEMORY_INPUT_SHADER_READ_BIT | VK_MEMORY_INPUT_UNIFORM_READ_BIT)) {
Mike Stroyan55658c22014-12-04 11:08:39 +0000245 flush_flags |= GEN6_PIPE_CONTROL_TEXTURE_CACHE_INVALIDATE;
246 }
247
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600248 if (input_mask & VK_MEMORY_INPUT_UNIFORM_READ_BIT) {
Mike Stroyan55658c22014-12-04 11:08:39 +0000249 flush_flags |= GEN6_PIPE_CONTROL_CONSTANT_CACHE_INVALIDATE;
250 }
251
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600252 if (input_mask & VK_MEMORY_INPUT_VERTEX_ATTRIBUTE_FETCH_BIT) {
Mike Stroyan55658c22014-12-04 11:08:39 +0000253 flush_flags |= GEN6_PIPE_CONTROL_VF_CACHE_INVALIDATE;
254 }
255
256 /* These bits have no corresponding cache invalidate operation.
Courtney Goeltzenleuchtera569a502015-04-29 17:16:21 -0600257 * VK_MEMORY_INPUT_HOST_READ_BIT
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600258 * VK_MEMORY_INPUT_INDIRECT_COMMAND_BIT
259 * VK_MEMORY_INPUT_INDEX_FETCH_BIT
260 * VK_MEMORY_INPUT_COLOR_ATTACHMENT_BIT
261 * VK_MEMORY_INPUT_DEPTH_STENCIL_ATTACHMENT_BIT
Courtney Goeltzenleuchterad870812015-04-15 15:29:59 -0600262 * VK_MEMORY_INPUT_TRANSFER_BIT
Mike Stroyan55658c22014-12-04 11:08:39 +0000263 */
264
Chia-I Wu525c6602014-08-27 10:22:34 +0800265 cmd_batch_flush(cmd, flush_flags);
266}
267
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600268ICD_EXPORT void VKAPI vkCmdWaitEvents(
Tony Barbour8205d902015-04-16 15:59:00 -0600269 VkCmdBuffer cmdBuffer,
Tony Barbour8205d902015-04-16 15:59:00 -0600270 uint32_t eventCount,
271 const VkEvent* pEvents,
Tony Barbourc2e987e2015-06-29 16:20:35 -0600272 VkPipelineStageFlags sourceStageMask,
273 VkPipelineStageFlags destStageMask,
Tony Barbour8205d902015-04-16 15:59:00 -0600274 uint32_t memBarrierCount,
Courtney Goeltzenleuchterd9ba3422015-07-12 12:58:58 -0600275 const void* const* ppMemBarriers)
Chia-I Wu525c6602014-08-27 10:22:34 +0800276{
277 struct intel_cmd *cmd = intel_cmd(cmdBuffer);
Chia-I Wu525c6602014-08-27 10:22:34 +0800278
Tony Barbourc2e987e2015-06-29 16:20:35 -0600279 /* This hardware will always wait at VK_PIPELINE_STAGE_TOP_OF_PIPE.
280 * Passing a stageMask specifying other stages
Mike Stroyan55658c22014-12-04 11:08:39 +0000281 * does not change that.
282 */
Chia-I Wu525c6602014-08-27 10:22:34 +0800283
Mike Stroyan55658c22014-12-04 11:08:39 +0000284 /* Because the command buffer is serialized, reaching
285 * a pipelined wait is always after completion of prior events.
286 * pWaitInfo->pEvents need not be examined.
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600287 * vkCmdWaitEvents is equivalent to memory barrier part of vkCmdPipelineBarrier.
Mike Stroyan55658c22014-12-04 11:08:39 +0000288 * cmd_memory_barriers will wait for GEN6_PIPE_CONTROL_CS_STALL and perform
289 * appropriate cache control.
290 */
291 cmd_memory_barriers(cmd,
292 GEN6_PIPE_CONTROL_CS_STALL,
Tony Barbour8205d902015-04-16 15:59:00 -0600293 memBarrierCount, ppMemBarriers);
Mike Stroyan55658c22014-12-04 11:08:39 +0000294}
295
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600296ICD_EXPORT void VKAPI vkCmdPipelineBarrier(
Tony Barbour8205d902015-04-16 15:59:00 -0600297 VkCmdBuffer cmdBuffer,
Courtney Goeltzenleuchter82b348f2015-07-12 13:07:46 -0600298 VkPipelineStageFlags srcStageMask,
Tony Barbourc2e987e2015-06-29 16:20:35 -0600299 VkPipelineStageFlags destStageMask,
Courtney Goeltzenleuchter1f41f542015-07-09 11:44:38 -0600300 VkBool32 byRegion,
Tony Barbour8205d902015-04-16 15:59:00 -0600301 uint32_t memBarrierCount,
Courtney Goeltzenleuchter82b348f2015-07-12 13:07:46 -0600302 const void* const* ppMemBarriers)
Mike Stroyan55658c22014-12-04 11:08:39 +0000303{
304 struct intel_cmd *cmd = intel_cmd(cmdBuffer);
305 uint32_t pipe_control_flags = 0;
Mike Stroyan55658c22014-12-04 11:08:39 +0000306
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600307 /* This hardware will always wait at VK_WAIT_EVENT_TOP_OF_PIPE.
Tony Barbourc2e987e2015-06-29 16:20:35 -0600308 * Passing a stageMask specifying other stages
Mike Stroyan55658c22014-12-04 11:08:39 +0000309 * does not change that.
310 */
311
312 /* Cache control is done with PIPE_CONTROL flags.
Courtney Goeltzenleuchter9cc421e2015-04-08 15:36:08 -0600313 * With no GEN6_PIPE_CONTROL_CS_STALL flag set, it behaves as VK_PIPE_EVENT_TOP_OF_PIPE.
Tony Barbour8205d902015-04-16 15:59:00 -0600314 * All other pEvents values will behave as VK_PIPE_EVENT_COMMANDS_COMPLETE.
Mike Stroyan55658c22014-12-04 11:08:39 +0000315 */
Tony Barbourc2e987e2015-06-29 16:20:35 -0600316
Courtney Goeltzenleuchter73a21d32015-07-12 13:20:05 -0600317 if ((srcStageMask & VK_PIPELINE_STAGE_ALL_GRAPHICS) ||
Tony Barbourc2e987e2015-06-29 16:20:35 -0600318 (destStageMask & VK_PIPELINE_STAGE_ALL_GRAPHICS)){
319 pipe_control_flags = GEN6_PIPE_CONTROL_CS_STALL;
Chia-I Wu525c6602014-08-27 10:22:34 +0800320 }
321
Mike Stroyan55658c22014-12-04 11:08:39 +0000322 /* cmd_memory_barriers can wait for GEN6_PIPE_CONTROL_CS_STALL and perform
323 * appropriate cache control.
324 */
325 cmd_memory_barriers(cmd,
326 pipe_control_flags,
Tony Barbour8205d902015-04-16 15:59:00 -0600327 memBarrierCount, ppMemBarriers);
Chia-I Wu525c6602014-08-27 10:22:34 +0800328}