Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2012 Intel Corporation |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice (including the next |
| 12 | * paragraph) shall be included in all copies or substantial portions of the |
| 13 | * Software. |
| 14 | * |
| 15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 | * IN THE SOFTWARE. |
| 22 | */ |
| 23 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 24 | #include "main/context.h" |
| 25 | #include "main/teximage.h" |
| 26 | #include "main/blend.h" |
| 27 | #include "main/fbobject.h" |
| 28 | #include "main/renderbuffer.h" |
| 29 | #include "main/glformats.h" |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 30 | |
| 31 | #include "brw_blorp.h" |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 32 | #include "brw_context.h" |
Jason Ekstrand | 6d2f8f8 | 2016-08-17 09:31:27 -0700 | [diff] [blame] | 33 | #include "brw_defines.h" |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 34 | #include "brw_meta_util.h" |
Kenneth Graunke | 6f7c41d | 2013-09-30 18:11:03 -0700 | [diff] [blame] | 35 | #include "brw_state.h" |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 36 | #include "intel_fbo.h" |
| 37 | #include "intel_debug.h" |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 38 | |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 39 | #define FILE_DEBUG_FLAG DEBUG_BLORP |
| 40 | |
Jason Ekstrand | 600446c | 2016-08-26 10:07:40 -0700 | [diff] [blame] | 41 | static bool |
| 42 | brw_blorp_lookup_shader(struct blorp_context *blorp, |
| 43 | const void *key, uint32_t key_size, |
| 44 | uint32_t *kernel_out, void *prog_data_out) |
| 45 | { |
| 46 | struct brw_context *brw = blorp->driver_ctx; |
| 47 | return brw_search_cache(&brw->cache, BRW_CACHE_BLORP_PROG, |
| 48 | key, key_size, kernel_out, prog_data_out); |
| 49 | } |
| 50 | |
| 51 | static void |
| 52 | brw_blorp_upload_shader(struct blorp_context *blorp, |
| 53 | const void *key, uint32_t key_size, |
| 54 | const void *kernel, uint32_t kernel_size, |
Jason Ekstrand | 4306c10 | 2016-11-01 14:16:34 -0700 | [diff] [blame] | 55 | const struct brw_stage_prog_data *prog_data, |
| 56 | uint32_t prog_data_size, |
Jason Ekstrand | 600446c | 2016-08-26 10:07:40 -0700 | [diff] [blame] | 57 | uint32_t *kernel_out, void *prog_data_out) |
| 58 | { |
| 59 | struct brw_context *brw = blorp->driver_ctx; |
| 60 | brw_upload_cache(&brw->cache, BRW_CACHE_BLORP_PROG, key, key_size, |
| 61 | kernel, kernel_size, prog_data, prog_data_size, |
| 62 | kernel_out, prog_data_out); |
| 63 | } |
| 64 | |
Jason Ekstrand | a14d1b6 | 2016-08-15 15:07:22 -0700 | [diff] [blame] | 65 | void |
| 66 | brw_blorp_init(struct brw_context *brw) |
| 67 | { |
| 68 | blorp_init(&brw->blorp, brw, &brw->isl_dev); |
Jason Ekstrand | 600446c | 2016-08-26 10:07:40 -0700 | [diff] [blame] | 69 | |
Kenneth Graunke | 9694b23 | 2015-11-30 15:47:13 -0800 | [diff] [blame] | 70 | brw->blorp.compiler = brw->screen->compiler; |
Jason Ekstrand | 99b9e9b | 2016-08-19 00:49:18 -0700 | [diff] [blame] | 71 | |
Jason Ekstrand | 6d2f8f8 | 2016-08-17 09:31:27 -0700 | [diff] [blame] | 72 | switch (brw->gen) { |
| 73 | case 6: |
| 74 | brw->blorp.mocs.tex = 0; |
| 75 | brw->blorp.mocs.rb = 0; |
| 76 | brw->blorp.mocs.vb = 0; |
Jason Ekstrand | bc159ff | 2016-08-18 10:02:03 -0700 | [diff] [blame] | 77 | brw->blorp.exec = gen6_blorp_exec; |
Jason Ekstrand | 6d2f8f8 | 2016-08-17 09:31:27 -0700 | [diff] [blame] | 78 | break; |
| 79 | case 7: |
| 80 | brw->blorp.mocs.tex = GEN7_MOCS_L3; |
| 81 | brw->blorp.mocs.rb = GEN7_MOCS_L3; |
| 82 | brw->blorp.mocs.vb = GEN7_MOCS_L3; |
Jason Ekstrand | bc159ff | 2016-08-18 10:02:03 -0700 | [diff] [blame] | 83 | if (brw->is_haswell) { |
| 84 | brw->blorp.exec = gen75_blorp_exec; |
| 85 | } else { |
| 86 | brw->blorp.exec = gen7_blorp_exec; |
| 87 | } |
Jason Ekstrand | 6d2f8f8 | 2016-08-17 09:31:27 -0700 | [diff] [blame] | 88 | break; |
| 89 | case 8: |
| 90 | brw->blorp.mocs.tex = BDW_MOCS_WB; |
| 91 | brw->blorp.mocs.rb = BDW_MOCS_PTE; |
| 92 | brw->blorp.mocs.vb = BDW_MOCS_WB; |
Jason Ekstrand | bc159ff | 2016-08-18 10:02:03 -0700 | [diff] [blame] | 93 | brw->blorp.exec = gen8_blorp_exec; |
Jason Ekstrand | 6d2f8f8 | 2016-08-17 09:31:27 -0700 | [diff] [blame] | 94 | break; |
| 95 | case 9: |
| 96 | brw->blorp.mocs.tex = SKL_MOCS_WB; |
| 97 | brw->blorp.mocs.rb = SKL_MOCS_PTE; |
| 98 | brw->blorp.mocs.vb = SKL_MOCS_WB; |
Jason Ekstrand | bc159ff | 2016-08-18 10:02:03 -0700 | [diff] [blame] | 99 | brw->blorp.exec = gen9_blorp_exec; |
Jason Ekstrand | 6d2f8f8 | 2016-08-17 09:31:27 -0700 | [diff] [blame] | 100 | break; |
| 101 | default: |
| 102 | unreachable("Invalid gen"); |
| 103 | } |
| 104 | |
Jason Ekstrand | 600446c | 2016-08-26 10:07:40 -0700 | [diff] [blame] | 105 | brw->blorp.lookup_shader = brw_blorp_lookup_shader; |
| 106 | brw->blorp.upload_shader = brw_blorp_upload_shader; |
Jason Ekstrand | a14d1b6 | 2016-08-15 15:07:22 -0700 | [diff] [blame] | 107 | } |
| 108 | |
Jason Ekstrand | 406c503 | 2016-07-22 14:41:43 -0700 | [diff] [blame] | 109 | static void |
| 110 | apply_gen6_stencil_hiz_offset(struct isl_surf *surf, |
| 111 | struct intel_mipmap_tree *mt, |
| 112 | uint32_t lod, |
| 113 | uint32_t *offset) |
| 114 | { |
| 115 | assert(mt->array_layout == ALL_SLICES_AT_EACH_LOD); |
| 116 | |
Jason Ekstrand | d8644f3 | 2016-07-19 19:59:16 -0700 | [diff] [blame] | 117 | if (mt->format == MESA_FORMAT_S_UINT8) { |
| 118 | /* Note: we can't compute the stencil offset using |
| 119 | * intel_miptree_get_aligned_offset(), because the miptree |
| 120 | * claims that the region is untiled even though it's W tiled. |
| 121 | */ |
| 122 | *offset = mt->level[lod].level_y * mt->pitch + |
| 123 | mt->level[lod].level_x * 64; |
| 124 | } else { |
| 125 | *offset = intel_miptree_get_aligned_offset(mt, |
| 126 | mt->level[lod].level_x, |
Jason Ekstrand | c30b716 | 2016-10-24 10:38:07 -0700 | [diff] [blame] | 127 | mt->level[lod].level_y); |
Jason Ekstrand | d8644f3 | 2016-07-19 19:59:16 -0700 | [diff] [blame] | 128 | } |
Jason Ekstrand | 406c503 | 2016-07-22 14:41:43 -0700 | [diff] [blame] | 129 | |
| 130 | surf->logical_level0_px.width = minify(surf->logical_level0_px.width, lod); |
| 131 | surf->logical_level0_px.height = minify(surf->logical_level0_px.height, lod); |
| 132 | surf->phys_level0_sa.width = minify(surf->phys_level0_sa.width, lod); |
| 133 | surf->phys_level0_sa.height = minify(surf->phys_level0_sa.height, lod); |
| 134 | surf->levels = 1; |
| 135 | surf->array_pitch_el_rows = |
| 136 | ALIGN(surf->phys_level0_sa.height, surf->image_alignment_el.height); |
| 137 | } |
| 138 | |
/* Fill out a blorp_surf describing a miptree, resolving any auxiliary data
 * that blorp cannot safely consume.
 *
 * safe_aux_usage is a bitmask of (1 << ISL_AUX_USAGE_*) values the caller
 * can handle; any aux data outside that set is resolved away and
 * surf->aux_usage is downgraded to NONE.  *level may be rewritten to 0 for
 * the gen6 ALL_SLICES_AT_EACH_LOD stencil/HiZ hack.  tmp_surfs provides
 * storage for the main ([0]) and aux ([1]) isl_surfs that surf points into.
 */
static void
blorp_surf_for_miptree(struct brw_context *brw,
                       struct blorp_surf *surf,
                       struct intel_mipmap_tree *mt,
                       bool is_render_target,
                       uint32_t safe_aux_usage,
                       unsigned *level,
                       unsigned start_layer, unsigned num_layers,
                       struct isl_surf tmp_surfs[2])
{
   if (mt->msaa_layout == INTEL_MSAA_LAYOUT_UMS ||
       mt->msaa_layout == INTEL_MSAA_LAYOUT_CMS) {
      /* UMS/CMS store each sample as its own physical layer, so validate
       * every (layer, sample) pair the caller may touch.
       */
      const unsigned num_samples = MAX2(1, mt->num_samples);
      for (unsigned i = 0; i < num_layers; i++) {
         for (unsigned s = 0; s < num_samples; s++) {
            const unsigned phys_layer = (start_layer + i) * num_samples + s;
            intel_miptree_check_level_layer(mt, *level, phys_layer);
         }
      }
   } else {
      for (unsigned i = 0; i < num_layers; i++)
         intel_miptree_check_level_layer(mt, *level, start_layer + i);
   }

   intel_miptree_get_isl_surf(brw, mt, &tmp_surfs[0]);
   surf->surf = &tmp_surfs[0];
   surf->addr = (struct blorp_address) {
      .buffer = mt->bo,
      .offset = mt->offset,
      .read_domains = is_render_target ? I915_GEM_DOMAIN_RENDER :
                                         I915_GEM_DOMAIN_SAMPLER,
      .write_domain = is_render_target ? I915_GEM_DOMAIN_RENDER : 0,
   };

   if (brw->gen == 6 && mt->format == MESA_FORMAT_S_UINT8 &&
       mt->array_layout == ALL_SLICES_AT_EACH_LOD) {
      /* Sandy bridge stencil and HiZ use this ALL_SLICES_AT_EACH_LOD hack in
       * order to allow for layered rendering.  The hack makes each LOD of the
       * stencil or HiZ buffer a single tightly packed array surface at some
       * offset into the surface.  Since ISL doesn't know how to deal with the
       * crazy ALL_SLICES_AT_EACH_LOD layout and since we have to do a manual
       * offset of it anyway, we might as well do the offset here and keep the
       * hacks inside the i965 driver.
       *
       * See also gen6_depth_stencil_state.c
       */
      uint32_t offset;
      apply_gen6_stencil_hiz_offset(&tmp_surfs[0], mt, *level, &offset);
      surf->addr.offset += offset;
      /* The surf now describes exactly one LOD, so address it as level 0. */
      *level = 0;
   }

   struct isl_surf *aux_surf = &tmp_surfs[1];
   intel_miptree_get_aux_isl_surf(brw, mt, aux_surf, &surf->aux_usage);

   if (surf->aux_usage != ISL_AUX_USAGE_NONE) {
      if (surf->aux_usage == ISL_AUX_USAGE_HIZ) {
         /* If we're not going to use it as a depth buffer, resolve HiZ */
         if (!(safe_aux_usage & (1 << ISL_AUX_USAGE_HIZ))) {
            for (unsigned i = 0; i < num_layers; i++) {
               intel_miptree_slice_resolve_depth(brw, mt, *level,
                                                 start_layer + i);

               /* If we're rendering to it then we'll need a HiZ resolve once
                * we're done before we can use it with HiZ again.
                */
               if (is_render_target)
                  intel_miptree_slice_set_needs_hiz_resolve(mt, *level,
                                                            start_layer + i);
            }
            surf->aux_usage = ISL_AUX_USAGE_NONE;
         }
      } else if (!(safe_aux_usage & (1 << surf->aux_usage))) {
         /* Color aux (MCS/CCS) the caller can't consume: resolve it away.
          * If CCS_E is still acceptable, skip resolving CCS_E-compressed
          * data (INTEL_MIPTREE_IGNORE_CCS_E).
          */
         uint32_t flags = 0;
         if (safe_aux_usage & (1 << ISL_AUX_USAGE_CCS_E))
            flags |= INTEL_MIPTREE_IGNORE_CCS_E;

         intel_miptree_resolve_color(brw, mt, flags);

         assert(mt->fast_clear_state == INTEL_FAST_CLEAR_STATE_RESOLVED);
         surf->aux_usage = ISL_AUX_USAGE_NONE;
      }
   }

   if (is_render_target)
      intel_miptree_used_for_rendering(brw, mt);

   if (surf->aux_usage != ISL_AUX_USAGE_NONE) {
      /* We only really need a clear color if we also have an auxiliary
       * surface.  Without one, it does nothing.
       */
      surf->clear_color = intel_miptree_get_isl_clear_color(brw, mt);

      surf->aux_surf = aux_surf;
      surf->aux_addr = (struct blorp_address) {
         .read_domains = is_render_target ? I915_GEM_DOMAIN_RENDER :
                                            I915_GEM_DOMAIN_SAMPLER,
         .write_domain = is_render_target ? I915_GEM_DOMAIN_RENDER : 0,
      };

      if (mt->mcs_buf) {
         /* Color aux lives in the MCS buffer. */
         surf->aux_addr.buffer = mt->mcs_buf->bo;
         surf->aux_addr.offset = mt->mcs_buf->offset;
      } else {
         /* No MCS buffer, so this must be a HiZ surface. */
         assert(surf->aux_usage == ISL_AUX_USAGE_HIZ);
         struct intel_mipmap_tree *hiz_mt = mt->hiz_buf->mt;
         if (hiz_mt) {
            surf->aux_addr.buffer = hiz_mt->bo;
            if (brw->gen == 6 &&
                hiz_mt->array_layout == ALL_SLICES_AT_EACH_LOD) {
               /* gen6 requires the HiZ buffer to be manually offset to the
                * right location.  We could fixup the surf but it doesn't
                * matter since most of those fields don't matter.
                */
               apply_gen6_stencil_hiz_offset(aux_surf, hiz_mt, *level,
                                             &surf->aux_addr.offset);
            } else {
               surf->aux_addr.offset = 0;
            }
            assert(hiz_mt->pitch == aux_surf->row_pitch);
         } else {
            surf->aux_addr.buffer = mt->hiz_buf->aux_base.bo;
            surf->aux_addr.offset = mt->hiz_buf->aux_base.offset;
         }
      }
   } else {
      surf->aux_addr = (struct blorp_address) {
         .buffer = NULL,
      };
      memset(&surf->clear_color, 0, sizeof(surf->clear_color));
   }
   /* Invariant: aux usage and aux buffer are set (or cleared) together. */
   assert((surf->aux_usage == ISL_AUX_USAGE_NONE) ==
          (surf->aux_addr.buffer == NULL));
}
| 273 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 274 | static enum isl_format |
Jason Ekstrand | 75deae9 | 2016-07-19 19:04:03 -0700 | [diff] [blame] | 275 | brw_blorp_to_isl_format(struct brw_context *brw, mesa_format format, |
| 276 | bool is_render_target) |
| 277 | { |
| 278 | switch (format) { |
| 279 | case MESA_FORMAT_NONE: |
| 280 | return ISL_FORMAT_UNSUPPORTED; |
| 281 | case MESA_FORMAT_S_UINT8: |
| 282 | return ISL_FORMAT_R8_UINT; |
| 283 | case MESA_FORMAT_Z24_UNORM_X8_UINT: |
| 284 | return ISL_FORMAT_R24_UNORM_X8_TYPELESS; |
| 285 | case MESA_FORMAT_Z_FLOAT32: |
| 286 | return ISL_FORMAT_R32_FLOAT; |
| 287 | case MESA_FORMAT_Z_UNORM16: |
| 288 | return ISL_FORMAT_R16_UNORM; |
| 289 | default: { |
| 290 | if (is_render_target) { |
| 291 | assert(brw->format_supported_as_render_target[format]); |
| 292 | return brw->render_target_format[format]; |
| 293 | } else { |
| 294 | return brw_format_for_mesa_format(format); |
| 295 | } |
| 296 | break; |
| 297 | } |
| 298 | } |
| 299 | } |
| 300 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 301 | /** |
Jason Ekstrand | 2dba548 | 2016-08-27 21:48:40 -0700 | [diff] [blame] | 302 | * Convert an swizzle enumeration (i.e. SWIZZLE_X) to one of the Gen7.5+ |
| 303 | * "Shader Channel Select" enumerations (i.e. HSW_SCS_RED). The mappings are |
| 304 | * |
| 305 | * SWIZZLE_X, SWIZZLE_Y, SWIZZLE_Z, SWIZZLE_W, SWIZZLE_ZERO, SWIZZLE_ONE |
| 306 | * 0 1 2 3 4 5 |
| 307 | * 4 5 6 7 0 1 |
| 308 | * SCS_RED, SCS_GREEN, SCS_BLUE, SCS_ALPHA, SCS_ZERO, SCS_ONE |
| 309 | * |
| 310 | * which is simply adding 4 then modding by 8 (or anding with 7). |
| 311 | * |
| 312 | * We then may need to apply workarounds for textureGather hardware bugs. |
| 313 | */ |
| 314 | static enum isl_channel_select |
| 315 | swizzle_to_scs(GLenum swizzle) |
| 316 | { |
| 317 | return (enum isl_channel_select)((swizzle + 4) & 7); |
| 318 | } |
| 319 | |
Jason Ekstrand | 54db5af | 2016-09-03 11:40:09 -0700 | [diff] [blame] | 320 | static unsigned |
| 321 | physical_to_logical_layer(struct intel_mipmap_tree *mt, |
| 322 | unsigned physical_layer) |
| 323 | { |
| 324 | if (mt->num_samples > 1 && |
| 325 | (mt->msaa_layout == INTEL_MSAA_LAYOUT_UMS || |
| 326 | mt->msaa_layout == INTEL_MSAA_LAYOUT_CMS)) { |
| 327 | assert(physical_layer % mt->num_samples == 0); |
| 328 | return physical_layer / mt->num_samples; |
| 329 | } else { |
| 330 | return physical_layer; |
| 331 | } |
| 332 | } |
| 333 | |
/**
 * Note: if the src (or dst) is a 2D multisample array texture on Gen7+ using
 * INTEL_MSAA_LAYOUT_UMS or INTEL_MSAA_LAYOUT_CMS, src_layer (dst_layer) is
 * the physical layer holding sample 0.  So, for example, if
 * src_mt->num_samples == 4, then logical layer n corresponds to src_layer ==
 * 4*n.
 */
void
brw_blorp_blit_miptrees(struct brw_context *brw,
                        struct intel_mipmap_tree *src_mt,
                        unsigned src_level, unsigned src_layer,
                        mesa_format src_format, int src_swizzle,
                        struct intel_mipmap_tree *dst_mt,
                        unsigned dst_level, unsigned dst_layer,
                        mesa_format dst_format,
                        float src_x0, float src_y0,
                        float src_x1, float src_y1,
                        float dst_x0, float dst_y0,
                        float dst_x1, float dst_y1,
                        GLenum filter, bool mirror_x, bool mirror_y,
                        bool decode_srgb, bool encode_srgb)
{
   /* Blorp operates in logical layers */
   src_layer = physical_to_logical_layer(src_mt, src_layer);
   dst_layer = physical_to_logical_layer(dst_mt, dst_layer);

   DBG("%s from %dx %s mt %p %d %d (%f,%f) (%f,%f)"
       "to %dx %s mt %p %d %d (%f,%f) (%f,%f) (flip %d,%d)\n",
       __func__,
       src_mt->num_samples, _mesa_get_format_name(src_mt->format), src_mt,
       src_level, src_layer, src_x0, src_y0, src_x1, src_y1,
       dst_mt->num_samples, _mesa_get_format_name(dst_mt->format), dst_mt,
       dst_level, dst_layer, dst_x0, dst_y0, dst_x1, dst_y1,
       mirror_x, mirror_y);

   /* If the caller doesn't want sRGB decode/encode, substitute the linear
    * equivalent formats so the hardware treats the data as plain UNORM.
    */
   if (!decode_srgb && _mesa_get_format_color_encoding(src_format) == GL_SRGB)
      src_format = _mesa_get_srgb_format_linear(src_format);

   if (!encode_srgb && _mesa_get_format_color_encoding(dst_format) == GL_SRGB)
      dst_format = _mesa_get_srgb_format_linear(dst_format);

   /* When doing a multisample resolve of a GL_LUMINANCE32F or GL_INTENSITY32F
    * texture, the above code configures the source format for L32_FLOAT or
    * I32_FLOAT, and the destination format for R32_FLOAT.  On Sandy Bridge,
    * the SAMPLE message appears to handle multisampled L32_FLOAT and
    * I32_FLOAT textures incorrectly, resulting in blocky artifacts.  So work
    * around the problem by using a source format of R32_FLOAT.  This
    * shouldn't affect rendering correctness, since the destination format is
    * R32_FLOAT, so only the contents of the red channel matters.
    */
   if (brw->gen == 6 &&
       src_mt->num_samples > 1 && dst_mt->num_samples <= 1 &&
       src_mt->format == dst_mt->format &&
       (dst_format == MESA_FORMAT_L_FLOAT32 ||
        dst_format == MESA_FORMAT_I_FLOAT32)) {
      src_format = dst_format = MESA_FORMAT_R_FLOAT32;
   }

   /* Build the sets of aux usages blorp may consume directly.  MCS is
    * always fine; CCS is only safe when we're not reinterpreting the
    * surface with a different format than the miptree's own.
    */
   uint32_t src_usage_flags = (1 << ISL_AUX_USAGE_MCS);
   if (src_format == src_mt->format)
      src_usage_flags |= (1 << ISL_AUX_USAGE_CCS_E);

   uint32_t dst_usage_flags = (1 << ISL_AUX_USAGE_MCS);
   if (dst_format == dst_mt->format) {
      dst_usage_flags |= (1 << ISL_AUX_USAGE_CCS_E) |
                         (1 << ISL_AUX_USAGE_CCS_D);
   }

   /* tmp_surfs[0..1] back the source surf, [2..3] the destination surf. */
   struct isl_surf tmp_surfs[4];
   struct blorp_surf src_surf, dst_surf;
   blorp_surf_for_miptree(brw, &src_surf, src_mt, false, src_usage_flags,
                          &src_level, src_layer, 1, &tmp_surfs[0]);
   blorp_surf_for_miptree(brw, &dst_surf, dst_mt, true, dst_usage_flags,
                          &dst_level, dst_layer, 1, &tmp_surfs[2]);

   /* Expand the packed Mesa swizzle into per-channel SCS selects. */
   struct isl_swizzle src_isl_swizzle = {
      .r = swizzle_to_scs(GET_SWZ(src_swizzle, 0)),
      .g = swizzle_to_scs(GET_SWZ(src_swizzle, 1)),
      .b = swizzle_to_scs(GET_SWZ(src_swizzle, 2)),
      .a = swizzle_to_scs(GET_SWZ(src_swizzle, 3)),
   };

   struct blorp_batch batch;
   blorp_batch_init(&brw->blorp, &batch, brw, 0);
   blorp_blit(&batch, &src_surf, src_level, src_layer,
              brw_blorp_to_isl_format(brw, src_format, false), src_isl_swizzle,
              &dst_surf, dst_level, dst_layer,
              brw_blorp_to_isl_format(brw, dst_format, true),
              ISL_SWIZZLE_IDENTITY,
              src_x0, src_y0, src_x1, src_y1,
              dst_x0, dst_y0, dst_x1, dst_y1,
              filter, mirror_x, mirror_y);
   blorp_batch_finish(&batch);
}
| 428 | |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 429 | void |
| 430 | brw_blorp_copy_miptrees(struct brw_context *brw, |
| 431 | struct intel_mipmap_tree *src_mt, |
| 432 | unsigned src_level, unsigned src_layer, |
| 433 | struct intel_mipmap_tree *dst_mt, |
| 434 | unsigned dst_level, unsigned dst_layer, |
| 435 | unsigned src_x, unsigned src_y, |
| 436 | unsigned dst_x, unsigned dst_y, |
| 437 | unsigned src_width, unsigned src_height) |
| 438 | { |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 439 | DBG("%s from %dx %s mt %p %d %d (%d,%d) %dx%d" |
| 440 | "to %dx %s mt %p %d %d (%d,%d)\n", |
| 441 | __func__, |
| 442 | src_mt->num_samples, _mesa_get_format_name(src_mt->format), src_mt, |
| 443 | src_level, src_layer, src_x, src_y, src_width, src_height, |
| 444 | dst_mt->num_samples, _mesa_get_format_name(dst_mt->format), dst_mt, |
| 445 | dst_level, dst_layer, dst_x, dst_y); |
| 446 | |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 447 | struct isl_surf tmp_surfs[4]; |
| 448 | struct blorp_surf src_surf, dst_surf; |
| 449 | blorp_surf_for_miptree(brw, &src_surf, src_mt, false, |
Jason Ekstrand | 2b5644e | 2016-10-26 02:27:01 -0700 | [diff] [blame] | 450 | (1 << ISL_AUX_USAGE_MCS) | |
| 451 | (1 << ISL_AUX_USAGE_CCS_E), |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 452 | &src_level, src_layer, 1, &tmp_surfs[0]); |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 453 | blorp_surf_for_miptree(brw, &dst_surf, dst_mt, true, |
Jason Ekstrand | 2b5644e | 2016-10-26 02:27:01 -0700 | [diff] [blame] | 454 | (1 << ISL_AUX_USAGE_MCS) | |
| 455 | (1 << ISL_AUX_USAGE_CCS_E), |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 456 | &dst_level, dst_layer, 1, &tmp_surfs[2]); |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 457 | |
| 458 | struct blorp_batch batch; |
Jason Ekstrand | d80c030 | 2016-10-07 17:20:00 -0700 | [diff] [blame] | 459 | blorp_batch_init(&brw->blorp, &batch, brw, 0); |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 460 | blorp_copy(&batch, &src_surf, src_level, src_layer, |
| 461 | &dst_surf, dst_level, dst_layer, |
| 462 | src_x, src_y, dst_x, dst_y, src_width, src_height); |
| 463 | blorp_batch_finish(&batch); |
Jason Ekstrand | 540395b | 2016-09-08 21:27:01 -0700 | [diff] [blame] | 464 | } |
| 465 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 466 | static struct intel_mipmap_tree * |
| 467 | find_miptree(GLbitfield buffer_bit, struct intel_renderbuffer *irb) |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 468 | { |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 469 | struct intel_mipmap_tree *mt = irb->mt; |
| 470 | if (buffer_bit == GL_STENCIL_BUFFER_BIT && mt->stencil_mt) |
| 471 | mt = mt->stencil_mt; |
| 472 | return mt; |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 473 | } |
| 474 | |
| 475 | static int |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 476 | blorp_get_texture_swizzle(const struct intel_renderbuffer *irb) |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 477 | { |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 478 | return irb->Base.Base._BaseFormat == GL_RGB ? |
| 479 | MAKE_SWIZZLE4(SWIZZLE_X, SWIZZLE_Y, SWIZZLE_Z, SWIZZLE_ONE) : |
| 480 | SWIZZLE_XYZW; |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 481 | } |
| 482 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 483 | static void |
| 484 | do_blorp_blit(struct brw_context *brw, GLbitfield buffer_bit, |
| 485 | struct intel_renderbuffer *src_irb, mesa_format src_format, |
| 486 | struct intel_renderbuffer *dst_irb, mesa_format dst_format, |
| 487 | GLfloat srcX0, GLfloat srcY0, GLfloat srcX1, GLfloat srcY1, |
| 488 | GLfloat dstX0, GLfloat dstY0, GLfloat dstX1, GLfloat dstY1, |
| 489 | GLenum filter, bool mirror_x, bool mirror_y) |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 490 | { |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 491 | const struct gl_context *ctx = &brw->ctx; |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 492 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 493 | /* Find source/dst miptrees */ |
| 494 | struct intel_mipmap_tree *src_mt = find_miptree(buffer_bit, src_irb); |
| 495 | struct intel_mipmap_tree *dst_mt = find_miptree(buffer_bit, dst_irb); |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 496 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 497 | const bool do_srgb = ctx->Color.sRGBEnabled; |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 498 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 499 | /* Do the blit */ |
| 500 | brw_blorp_blit_miptrees(brw, |
| 501 | src_mt, src_irb->mt_level, src_irb->mt_layer, |
| 502 | src_format, blorp_get_texture_swizzle(src_irb), |
| 503 | dst_mt, dst_irb->mt_level, dst_irb->mt_layer, |
| 504 | dst_format, |
| 505 | srcX0, srcY0, srcX1, srcY1, |
| 506 | dstX0, dstY0, dstX1, dstY1, |
| 507 | filter, mirror_x, mirror_y, |
| 508 | do_srgb, do_srgb); |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 509 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 510 | dst_irb->need_downsample = true; |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 511 | } |
| 512 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 513 | static bool |
| 514 | try_blorp_blit(struct brw_context *brw, |
| 515 | const struct gl_framebuffer *read_fb, |
| 516 | const struct gl_framebuffer *draw_fb, |
| 517 | GLfloat srcX0, GLfloat srcY0, GLfloat srcX1, GLfloat srcY1, |
| 518 | GLfloat dstX0, GLfloat dstY0, GLfloat dstX1, GLfloat dstY1, |
| 519 | GLenum filter, GLbitfield buffer_bit) |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 520 | { |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 521 | struct gl_context *ctx = &brw->ctx; |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 522 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 523 | /* Sync up the state of window system buffers. We need to do this before |
| 524 | * we go looking for the buffers. |
| 525 | */ |
| 526 | intel_prepare_render(brw); |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 527 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 528 | bool mirror_x, mirror_y; |
| 529 | if (brw_meta_mirror_clip_and_scissor(ctx, read_fb, draw_fb, |
| 530 | &srcX0, &srcY0, &srcX1, &srcY1, |
| 531 | &dstX0, &dstY0, &dstX1, &dstY1, |
| 532 | &mirror_x, &mirror_y)) |
| 533 | return true; |
Jason Ekstrand | 871893c | 2016-06-27 17:30:35 -0700 | [diff] [blame] | 534 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 535 | /* Find buffers */ |
| 536 | struct intel_renderbuffer *src_irb; |
| 537 | struct intel_renderbuffer *dst_irb; |
| 538 | struct intel_mipmap_tree *src_mt; |
| 539 | struct intel_mipmap_tree *dst_mt; |
| 540 | switch (buffer_bit) { |
| 541 | case GL_COLOR_BUFFER_BIT: |
| 542 | src_irb = intel_renderbuffer(read_fb->_ColorReadBuffer); |
| 543 | for (unsigned i = 0; i < draw_fb->_NumColorDrawBuffers; ++i) { |
| 544 | dst_irb = intel_renderbuffer(draw_fb->_ColorDrawBuffers[i]); |
| 545 | if (dst_irb) |
| 546 | do_blorp_blit(brw, buffer_bit, |
| 547 | src_irb, src_irb->Base.Base.Format, |
| 548 | dst_irb, dst_irb->Base.Base.Format, |
| 549 | srcX0, srcY0, srcX1, srcY1, |
| 550 | dstX0, dstY0, dstX1, dstY1, |
| 551 | filter, mirror_x, mirror_y); |
| 552 | } |
| 553 | break; |
| 554 | case GL_DEPTH_BUFFER_BIT: |
| 555 | src_irb = |
| 556 | intel_renderbuffer(read_fb->Attachment[BUFFER_DEPTH].Renderbuffer); |
| 557 | dst_irb = |
| 558 | intel_renderbuffer(draw_fb->Attachment[BUFFER_DEPTH].Renderbuffer); |
| 559 | src_mt = find_miptree(buffer_bit, src_irb); |
| 560 | dst_mt = find_miptree(buffer_bit, dst_irb); |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 561 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 562 | /* We can't handle format conversions between Z24 and other formats |
| 563 | * since we have to lie about the surface format. See the comments in |
| 564 | * brw_blorp_surface_info::set(). |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 565 | */ |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 566 | if ((src_mt->format == MESA_FORMAT_Z24_UNORM_X8_UINT) != |
| 567 | (dst_mt->format == MESA_FORMAT_Z24_UNORM_X8_UINT)) |
| 568 | return false; |
| 569 | |
| 570 | do_blorp_blit(brw, buffer_bit, src_irb, MESA_FORMAT_NONE, |
| 571 | dst_irb, MESA_FORMAT_NONE, srcX0, srcY0, |
| 572 | srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, |
| 573 | filter, mirror_x, mirror_y); |
| 574 | break; |
| 575 | case GL_STENCIL_BUFFER_BIT: |
| 576 | src_irb = |
| 577 | intel_renderbuffer(read_fb->Attachment[BUFFER_STENCIL].Renderbuffer); |
| 578 | dst_irb = |
| 579 | intel_renderbuffer(draw_fb->Attachment[BUFFER_STENCIL].Renderbuffer); |
| 580 | do_blorp_blit(brw, buffer_bit, src_irb, MESA_FORMAT_NONE, |
| 581 | dst_irb, MESA_FORMAT_NONE, srcX0, srcY0, |
| 582 | srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, |
| 583 | filter, mirror_x, mirror_y); |
| 584 | break; |
| 585 | default: |
| 586 | unreachable("not reached"); |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 587 | } |
| 588 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 589 | return true; |
| 590 | } |
| 591 | |
/**
 * Try to implement glCopyTexSubImage* as a BLORP blit.
 *
 * Copies a width x height region of \p src_rb (starting at srcX0/srcY0) into
 * the given \p slice of \p dst_image (starting at dstX0/dstY0).  If the
 * destination is packed depth/stencil and the read framebuffer has a separate
 * stencil buffer, the stencil data is copied in a second blit.
 *
 * Returns true on success; returns false when the copy cannot be expressed
 * as a BLORP blit (pixel transfer ops active, unsupported sample counts,
 * Gen < 6, base-format mismatch, Z24 conversion, or a destination format
 * that cannot be rendered to), in which case the caller must fall back.
 */
bool
brw_blorp_copytexsubimage(struct brw_context *brw,
                          struct gl_renderbuffer *src_rb,
                          struct gl_texture_image *dst_image,
                          int slice,
                          int srcX0, int srcY0,
                          int dstX0, int dstY0,
                          int width, int height)
{
   struct gl_context *ctx = &brw->ctx;
   struct intel_renderbuffer *src_irb = intel_renderbuffer(src_rb);
   struct intel_texture_image *intel_image = intel_texture_image(dst_image);

   /* No pixel transfer operations (zoom, bias, mapping), just a blit */
   if (brw->ctx._ImageTransferState)
      return false;

   /* Sync up the state of window system buffers. We need to do this before
    * we go looking at the src renderbuffer's miptree.
    */
   intel_prepare_render(brw);

   struct intel_mipmap_tree *src_mt = src_irb->mt;
   struct intel_mipmap_tree *dst_mt = intel_image->mt;

   /* There is support for only up to eight samples. */
   if (src_mt->num_samples > 8 || dst_mt->num_samples > 8)
      return false;

   /* BLORP is only supported from Gen6 onwards. */
   if (brw->gen < 6)
      return false;

   /* Base formats must agree; cross-base-format copies are left to the
    * fallback path.
    */
   if (_mesa_get_format_base_format(src_rb->Format) !=
       _mesa_get_format_base_format(dst_image->TexFormat)) {
      return false;
   }

   /* We can't handle format conversions between Z24 and other formats since
    * we have to lie about the surface format. See the comments in
    * brw_blorp_surface_info::set().
    */
   if ((src_mt->format == MESA_FORMAT_Z24_UNORM_X8_UINT) !=
       (dst_mt->format == MESA_FORMAT_Z24_UNORM_X8_UINT)) {
      return false;
   }

   /* BLORP renders into the destination, so it must be a renderable format. */
   if (!brw->format_supported_as_render_target[dst_image->TexFormat])
      return false;

   /* Source clipping shouldn't be necessary, since copytexsubimage (in
    * src/mesa/main/teximage.c) calls _mesa_clip_copytexsubimage() which
    * takes care of it.
    *
    * Destination clipping shouldn't be necessary since the restrictions on
    * glCopyTexSubImage prevent the user from specifying a destination rectangle
    * that falls outside the bounds of the destination texture.
    * See error_check_subtexture_dimensions().
    */

   int srcY1 = srcY0 + height;
   int srcX1 = srcX0 + width;
   int dstX1 = dstX0 + width;
   int dstY1 = dstY0 + height;

   /* Account for the fact that in the system framebuffer, the origin is at
    * the lower left.
    */
   bool mirror_y = false;
   if (_mesa_is_winsys_fbo(ctx->ReadBuffer)) {
      /* Flip the source rectangle vertically and mirror the blit. */
      GLint tmp = src_rb->Height - srcY0;
      srcY0 = src_rb->Height - srcY1;
      srcY1 = tmp;
      mirror_y = true;
   }

   /* Account for face selection and texture view MinLayer */
   int dst_slice = slice + dst_image->TexObject->MinLayer + dst_image->Face;
   int dst_level = dst_image->Level + dst_image->TexObject->MinLevel;

   brw_blorp_blit_miptrees(brw,
                           src_mt, src_irb->mt_level, src_irb->mt_layer,
                           src_rb->Format, blorp_get_texture_swizzle(src_irb),
                           dst_mt, dst_level, dst_slice,
                           dst_image->TexFormat,
                           srcX0, srcY0, srcX1, srcY1,
                           dstX0, dstY0, dstX1, dstY1,
                           GL_NEAREST, false, mirror_y,
                           false, false);

   /* If we're copying to a packed depth stencil texture and the source
    * framebuffer has separate stencil, we need to also copy the stencil data
    * over.
    */
   src_rb = ctx->ReadBuffer->Attachment[BUFFER_STENCIL].Renderbuffer;
   if (_mesa_get_format_bits(dst_image->TexFormat, GL_STENCIL_BITS) > 0 &&
       src_rb != NULL) {
      src_irb = intel_renderbuffer(src_rb);
      src_mt = src_irb->mt;

      /* Use the separate-stencil miptrees where present. */
      if (src_mt->stencil_mt)
         src_mt = src_mt->stencil_mt;
      if (dst_mt->stencil_mt)
         dst_mt = dst_mt->stencil_mt;

      /* If they are one and the same, the first blit already did the work. */
      if (src_mt != dst_mt) {
         brw_blorp_blit_miptrees(brw,
                                 src_mt, src_irb->mt_level, src_irb->mt_layer,
                                 src_mt->format,
                                 blorp_get_texture_swizzle(src_irb),
                                 dst_mt, dst_level, dst_slice,
                                 dst_mt->format,
                                 srcX0, srcY0, srcX1, srcY1,
                                 dstX0, dstY0, dstX1, dstY1,
                                 GL_NEAREST, false, mirror_y,
                                 false, false);
      }
   }

   return true;
}
| 713 | |
| 714 | |
| 715 | GLbitfield |
| 716 | brw_blorp_framebuffer(struct brw_context *brw, |
| 717 | struct gl_framebuffer *readFb, |
| 718 | struct gl_framebuffer *drawFb, |
| 719 | GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, |
| 720 | GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, |
| 721 | GLbitfield mask, GLenum filter) |
| 722 | { |
| 723 | /* BLORP is not supported before Gen6. */ |
| 724 | if (brw->gen < 6) |
| 725 | return mask; |
| 726 | |
| 727 | static GLbitfield buffer_bits[] = { |
| 728 | GL_COLOR_BUFFER_BIT, |
| 729 | GL_DEPTH_BUFFER_BIT, |
| 730 | GL_STENCIL_BUFFER_BIT, |
| 731 | }; |
| 732 | |
| 733 | for (unsigned int i = 0; i < ARRAY_SIZE(buffer_bits); ++i) { |
| 734 | if ((mask & buffer_bits[i]) && |
| 735 | try_blorp_blit(brw, readFb, drawFb, |
| 736 | srcX0, srcY0, srcX1, srcY1, |
| 737 | dstX0, dstY0, dstX1, dstY1, |
| 738 | filter, buffer_bits[i])) { |
| 739 | mask &= ~buffer_bits[i]; |
| 740 | } |
| 741 | } |
| 742 | |
| 743 | return mask; |
| 744 | } |
| 745 | |
| 746 | static bool |
| 747 | set_write_disables(const struct intel_renderbuffer *irb, |
| 748 | const GLubyte *color_mask, bool *color_write_disable) |
| 749 | { |
| 750 | /* Format information in the renderbuffer represents the requirements |
| 751 | * given by the client. There are cases where the backing miptree uses, |
| 752 | * for example, RGBA to represent RGBX. Since the client is only expecting |
| 753 | * RGB we can treat alpha as not used and write whatever we like into it. |
| 754 | */ |
| 755 | const GLenum base_format = irb->Base.Base._BaseFormat; |
| 756 | const int components = _mesa_base_format_component_count(base_format); |
| 757 | bool disables = false; |
| 758 | |
| 759 | assert(components > 0); |
| 760 | |
| 761 | for (int i = 0; i < components; i++) { |
| 762 | color_write_disable[i] = !color_mask[i]; |
| 763 | disables = disables || !color_mask[i]; |
| 764 | } |
| 765 | |
| 766 | return disables; |
| 767 | } |
| 768 | |
Jason Ekstrand | 54db5af | 2016-09-03 11:40:09 -0700 | [diff] [blame] | 769 | static unsigned |
| 770 | irb_logical_mt_layer(struct intel_renderbuffer *irb) |
| 771 | { |
| 772 | return physical_to_logical_layer(irb->mt, irb->mt_layer); |
| 773 | } |
| 774 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 775 | static bool |
| 776 | do_single_blorp_clear(struct brw_context *brw, struct gl_framebuffer *fb, |
| 777 | struct gl_renderbuffer *rb, unsigned buf, |
Topi Pohjolainen | 1df4b66 | 2016-08-29 08:57:31 +0300 | [diff] [blame] | 778 | bool partial_clear, bool encode_srgb) |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 779 | { |
| 780 | struct gl_context *ctx = &brw->ctx; |
| 781 | struct intel_renderbuffer *irb = intel_renderbuffer(rb); |
| 782 | mesa_format format = irb->mt->format; |
| 783 | uint32_t x0, x1, y0, y1; |
| 784 | |
| 785 | if (!encode_srgb && _mesa_get_format_color_encoding(format) == GL_SRGB) |
| 786 | format = _mesa_get_srgb_format_linear(format); |
| 787 | |
| 788 | x0 = fb->_Xmin; |
| 789 | x1 = fb->_Xmax; |
| 790 | if (rb->Name != 0) { |
| 791 | y0 = fb->_Ymin; |
| 792 | y1 = fb->_Ymax; |
| 793 | } else { |
| 794 | y0 = rb->Height - fb->_Ymax; |
| 795 | y1 = rb->Height - fb->_Ymin; |
| 796 | } |
| 797 | |
Jason Ekstrand | e0bc2cb | 2016-08-08 12:10:26 -0700 | [diff] [blame] | 798 | /* If the clear region is empty, just return. */ |
| 799 | if (x0 == x1 || y0 == y1) |
| 800 | return true; |
| 801 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 802 | bool can_fast_clear = !partial_clear; |
| 803 | |
| 804 | bool color_write_disable[4] = { false, false, false, false }; |
| 805 | if (set_write_disables(irb, ctx->Color.ColorMask[buf], color_write_disable)) |
| 806 | can_fast_clear = false; |
| 807 | |
| 808 | if (irb->mt->fast_clear_state == INTEL_FAST_CLEAR_STATE_NO_MCS || |
| 809 | !brw_is_color_fast_clear_compatible(brw, irb->mt, &ctx->Color.ClearColor)) |
| 810 | can_fast_clear = false; |
| 811 | |
Topi Pohjolainen | 39712b2 | 2016-08-26 09:26:15 +0300 | [diff] [blame] | 812 | const bool is_lossless_compressed = intel_miptree_is_lossless_compressed( |
| 813 | brw, irb->mt); |
| 814 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 815 | if (can_fast_clear) { |
Topi Pohjolainen | 7c75fd9 | 2016-06-12 20:49:54 +0300 | [diff] [blame] | 816 | union gl_color_union override_color = |
| 817 | brw_meta_convert_fast_clear_color(brw, irb->mt, |
| 818 | &ctx->Color.ClearColor); |
| 819 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 820 | /* Record the clear color in the miptree so that it will be |
| 821 | * programmed in SURFACE_STATE by later rendering and resolve |
| 822 | * operations. |
| 823 | */ |
| 824 | const bool color_updated = brw_meta_set_fast_clear_color( |
Topi Pohjolainen | 7c75fd9 | 2016-06-12 20:49:54 +0300 | [diff] [blame] | 825 | brw, &irb->mt->gen9_fast_clear_color, |
| 826 | &override_color); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 827 | |
| 828 | /* If the buffer is already in INTEL_FAST_CLEAR_STATE_CLEAR, the clear |
| 829 | * is redundant and can be skipped. |
| 830 | */ |
| 831 | if (!color_updated && |
| 832 | irb->mt->fast_clear_state == INTEL_FAST_CLEAR_STATE_CLEAR) |
| 833 | return true; |
| 834 | |
| 835 | /* If the MCS buffer hasn't been allocated yet, we need to allocate |
| 836 | * it now. |
| 837 | */ |
Jordan Justen | 0041169 | 2016-10-21 12:56:49 +0100 | [diff] [blame] | 838 | if (!irb->mt->mcs_buf) { |
Topi Pohjolainen | 39712b2 | 2016-08-26 09:26:15 +0300 | [diff] [blame] | 839 | assert(!is_lossless_compressed); |
| 840 | if (!intel_miptree_alloc_non_msrt_mcs(brw, irb->mt, false)) { |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 841 | /* MCS allocation failed--probably this will only happen in |
| 842 | * out-of-memory conditions. But in any case, try to recover |
| 843 | * by falling back to a non-blorp clear technique. |
| 844 | */ |
| 845 | return false; |
| 846 | } |
| 847 | } |
| 848 | } |
| 849 | |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 850 | const unsigned logical_layer = irb_logical_mt_layer(irb); |
| 851 | const unsigned num_layers = fb->MaxNumLayers ? irb->layer_count : 1; |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 852 | |
| 853 | /* We can't setup the blorp_surf until we've allocated the MCS above */ |
| 854 | struct isl_surf isl_tmp[2]; |
Jason Ekstrand | 8bd35d8 | 2016-08-19 05:43:29 -0700 | [diff] [blame] | 855 | struct blorp_surf surf; |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 856 | unsigned level = irb->mt_level; |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 857 | blorp_surf_for_miptree(brw, &surf, irb->mt, true, |
| 858 | (1 << ISL_AUX_USAGE_MCS) | |
| 859 | (1 << ISL_AUX_USAGE_CCS_E) | |
| 860 | (1 << ISL_AUX_USAGE_CCS_D), |
| 861 | &level, logical_layer, num_layers, isl_tmp); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 862 | |
| 863 | if (can_fast_clear) { |
Topi Pohjolainen | 1df4b66 | 2016-08-29 08:57:31 +0300 | [diff] [blame] | 864 | DBG("%s (fast) to mt %p level %d layers %d+%d\n", __FUNCTION__, |
| 865 | irb->mt, irb->mt_level, irb->mt_layer, num_layers); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 866 | |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 867 | struct blorp_batch batch; |
Jason Ekstrand | d80c030 | 2016-10-07 17:20:00 -0700 | [diff] [blame] | 868 | blorp_batch_init(&brw->blorp, &batch, brw, 0); |
Topi Pohjolainen | a1c7de0 | 2016-08-26 18:44:18 +0300 | [diff] [blame] | 869 | blorp_fast_clear(&batch, &surf, |
Jason Ekstrand | 8721441 | 2016-08-23 17:13:07 -0700 | [diff] [blame] | 870 | (enum isl_format)brw->render_target_format[format], |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 871 | level, logical_layer, num_layers, |
Jason Ekstrand | 54db5af | 2016-09-03 11:40:09 -0700 | [diff] [blame] | 872 | x0, y0, x1, y1); |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 873 | blorp_batch_finish(&batch); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 874 | |
| 875 | /* Now that the fast clear has occurred, put the buffer in |
| 876 | * INTEL_FAST_CLEAR_STATE_CLEAR so that we won't waste time doing |
| 877 | * redundant clears. |
| 878 | */ |
| 879 | irb->mt->fast_clear_state = INTEL_FAST_CLEAR_STATE_CLEAR; |
| 880 | } else { |
Topi Pohjolainen | 1df4b66 | 2016-08-29 08:57:31 +0300 | [diff] [blame] | 881 | DBG("%s (slow) to mt %p level %d layer %d+%d\n", __FUNCTION__, |
| 882 | irb->mt, irb->mt_level, irb->mt_layer, num_layers); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 883 | |
| 884 | union isl_color_value clear_color; |
| 885 | memcpy(clear_color.f32, ctx->Color.ClearColor.f, sizeof(float) * 4); |
| 886 | |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 887 | struct blorp_batch batch; |
Jason Ekstrand | d80c030 | 2016-10-07 17:20:00 -0700 | [diff] [blame] | 888 | blorp_batch_init(&brw->blorp, &batch, brw, 0); |
Jason Ekstrand | c70be1e | 2016-08-30 18:01:27 -0700 | [diff] [blame] | 889 | blorp_clear(&batch, &surf, |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 890 | (enum isl_format)brw->render_target_format[format], |
Jason Ekstrand | c70be1e | 2016-08-30 18:01:27 -0700 | [diff] [blame] | 891 | ISL_SWIZZLE_IDENTITY, |
Jason Ekstrand | 54db5af | 2016-09-03 11:40:09 -0700 | [diff] [blame] | 892 | level, irb_logical_mt_layer(irb), num_layers, |
Jason Ekstrand | c70be1e | 2016-08-30 18:01:27 -0700 | [diff] [blame] | 893 | x0, y0, x1, y1, |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 894 | clear_color, color_write_disable); |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 895 | blorp_batch_finish(&batch); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 896 | } |
| 897 | |
| 898 | return true; |
| 899 | } |
| 900 | |
| 901 | bool |
| 902 | brw_blorp_clear_color(struct brw_context *brw, struct gl_framebuffer *fb, |
| 903 | GLbitfield mask, bool partial_clear, bool encode_srgb) |
| 904 | { |
| 905 | for (unsigned buf = 0; buf < fb->_NumColorDrawBuffers; buf++) { |
| 906 | struct gl_renderbuffer *rb = fb->_ColorDrawBuffers[buf]; |
| 907 | struct intel_renderbuffer *irb = intel_renderbuffer(rb); |
| 908 | |
| 909 | /* Only clear the buffers present in the provided mask */ |
| 910 | if (((1 << fb->_ColorDrawBufferIndexes[buf]) & mask) == 0) |
| 911 | continue; |
| 912 | |
| 913 | /* If this is an ES2 context or GL_ARB_ES2_compatibility is supported, |
| 914 | * the framebuffer can be complete with some attachments missing. In |
| 915 | * this case the _ColorDrawBuffers pointer will be NULL. |
| 916 | */ |
| 917 | if (rb == NULL) |
| 918 | continue; |
| 919 | |
Topi Pohjolainen | 1df4b66 | 2016-08-29 08:57:31 +0300 | [diff] [blame] | 920 | if (!do_single_blorp_clear(brw, fb, rb, buf, partial_clear, |
| 921 | encode_srgb)) { |
| 922 | return false; |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 923 | } |
| 924 | |
| 925 | irb->need_downsample = true; |
| 926 | } |
| 927 | |
| 928 | return true; |
| 929 | } |
| 930 | |
| 931 | void |
| 932 | brw_blorp_resolve_color(struct brw_context *brw, struct intel_mipmap_tree *mt) |
| 933 | { |
| 934 | DBG("%s to mt %p\n", __FUNCTION__, mt); |
| 935 | |
| 936 | const mesa_format format = _mesa_get_srgb_format_linear(mt->format); |
| 937 | |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 938 | struct isl_surf isl_tmp[2]; |
Jason Ekstrand | 8bd35d8 | 2016-08-19 05:43:29 -0700 | [diff] [blame] | 939 | struct blorp_surf surf; |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 940 | unsigned level = 0; |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 941 | blorp_surf_for_miptree(brw, &surf, mt, true, |
| 942 | (1 << ISL_AUX_USAGE_CCS_E) | |
| 943 | (1 << ISL_AUX_USAGE_CCS_D), |
| 944 | &level, 0 /* start_layer */, 1 /* num_layers */, |
| 945 | isl_tmp); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 946 | |
Jason Ekstrand | 1ba2f05 | 2016-10-25 10:48:12 -0700 | [diff] [blame] | 947 | enum blorp_fast_clear_op resolve_op; |
| 948 | if (brw->gen >= 9) { |
| 949 | if (surf.aux_usage == ISL_AUX_USAGE_CCS_E) |
| 950 | resolve_op = BLORP_FAST_CLEAR_OP_RESOLVE_FULL; |
| 951 | else |
| 952 | resolve_op = BLORP_FAST_CLEAR_OP_RESOLVE_PARTIAL; |
| 953 | } else { |
| 954 | assert(surf.aux_usage == ISL_AUX_USAGE_CCS_D); |
| 955 | /* Broadwell and earlier do not have a partial resolve */ |
| 956 | resolve_op = BLORP_FAST_CLEAR_OP_RESOLVE_FULL; |
| 957 | } |
| 958 | |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 959 | struct blorp_batch batch; |
Jason Ekstrand | d80c030 | 2016-10-07 17:20:00 -0700 | [diff] [blame] | 960 | blorp_batch_init(&brw->blorp, &batch, brw, 0); |
Pohjolainen, Topi | 7c560e8 | 2016-10-11 22:26:35 +0300 | [diff] [blame] | 961 | blorp_ccs_resolve(&batch, &surf, 0 /* level */, 0 /* layer */, |
Jason Ekstrand | 1ba2f05 | 2016-10-25 10:48:12 -0700 | [diff] [blame] | 962 | brw_blorp_to_isl_format(brw, format, true), |
| 963 | resolve_op); |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 964 | blorp_batch_finish(&batch); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 965 | |
| 966 | mt->fast_clear_state = INTEL_FAST_CLEAR_STATE_RESOLVED; |
| 967 | } |
| 968 | |
| 969 | static void |
| 970 | gen6_blorp_hiz_exec(struct brw_context *brw, struct intel_mipmap_tree *mt, |
Jason Ekstrand | 87a1cb6 | 2016-08-19 03:15:41 -0700 | [diff] [blame] | 971 | unsigned int level, unsigned int layer, enum blorp_hiz_op op) |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 972 | { |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 973 | assert(intel_miptree_level_has_hiz(mt, level)); |
| 974 | |
| 975 | struct isl_surf isl_tmp[2]; |
Jason Ekstrand | 8bd35d8 | 2016-08-19 05:43:29 -0700 | [diff] [blame] | 976 | struct blorp_surf surf; |
Jason Ekstrand | c8357b5 | 2016-11-16 13:47:13 -0800 | [diff] [blame] | 977 | blorp_surf_for_miptree(brw, &surf, mt, true, (1 << ISL_AUX_USAGE_HIZ), |
| 978 | &level, layer, 1, isl_tmp); |
Jason Ekstrand | f5fbcc3 | 2016-08-08 15:25:17 -0700 | [diff] [blame] | 979 | |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 980 | struct blorp_batch batch; |
Jason Ekstrand | d80c030 | 2016-10-07 17:20:00 -0700 | [diff] [blame] | 981 | blorp_batch_init(&brw->blorp, &batch, brw, 0); |
Jason Ekstrand | 2191f5c | 2016-08-19 00:54:56 -0700 | [diff] [blame] | 982 | blorp_gen6_hiz_op(&batch, &surf, level, layer, op); |
| 983 | blorp_batch_finish(&batch); |
Jason Ekstrand | 6553dc0 | 2016-06-10 12:03:18 -0700 | [diff] [blame] | 984 | } |
| 985 | |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 986 | /** |
| 987 | * Perform a HiZ or depth resolve operation. |
| 988 | * |
| 989 | * For an overview of HiZ ops, see the following sections of the Sandy Bridge |
| 990 | * PRM, Volume 1, Part 2: |
| 991 | * - 7.5.3.1 Depth Buffer Clear |
| 992 | * - 7.5.3.2 Depth Buffer Resolve |
| 993 | * - 7.5.3.3 Hierarchical Depth Buffer Resolve |
| 994 | */ |
Eric Anholt | 5b226ad | 2012-05-21 09:30:35 -0700 | [diff] [blame] | 995 | void |
Kenneth Graunke | ca43757 | 2013-07-02 23:17:14 -0700 | [diff] [blame] | 996 | intel_hiz_exec(struct brw_context *brw, struct intel_mipmap_tree *mt, |
Jason Ekstrand | 87a1cb6 | 2016-08-19 03:15:41 -0700 | [diff] [blame] | 997 | unsigned int level, unsigned int layer, enum blorp_hiz_op op) |
Eric Anholt | 5b226ad | 2012-05-21 09:30:35 -0700 | [diff] [blame] | 998 | { |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 999 | const char *opname = NULL; |
| 1000 | |
| 1001 | switch (op) { |
Jason Ekstrand | 87a1cb6 | 2016-08-19 03:15:41 -0700 | [diff] [blame] | 1002 | case BLORP_HIZ_OP_DEPTH_RESOLVE: |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 1003 | opname = "depth resolve"; |
| 1004 | break; |
Jason Ekstrand | 87a1cb6 | 2016-08-19 03:15:41 -0700 | [diff] [blame] | 1005 | case BLORP_HIZ_OP_HIZ_RESOLVE: |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 1006 | opname = "hiz ambiguate"; |
| 1007 | break; |
Jason Ekstrand | 87a1cb6 | 2016-08-19 03:15:41 -0700 | [diff] [blame] | 1008 | case BLORP_HIZ_OP_DEPTH_CLEAR: |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 1009 | opname = "depth clear"; |
| 1010 | break; |
Jason Ekstrand | 87a1cb6 | 2016-08-19 03:15:41 -0700 | [diff] [blame] | 1011 | case BLORP_HIZ_OP_NONE: |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 1012 | opname = "noop?"; |
| 1013 | break; |
| 1014 | } |
| 1015 | |
| 1016 | DBG("%s %s to mt %p level %d layer %d\n", |
Marius Predut | 28d9e90 | 2015-04-07 22:05:28 +0300 | [diff] [blame] | 1017 | __func__, opname, mt, level, layer); |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 1018 | |
Kenneth Graunke | 8cad1c1 | 2014-02-06 17:06:12 -0800 | [diff] [blame] | 1019 | if (brw->gen >= 8) { |
| 1020 | gen8_hiz_exec(brw, mt, level, layer, op); |
| 1021 | } else { |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 1022 | gen6_blorp_hiz_exec(brw, mt, level, layer, op); |
Kenneth Graunke | 8cad1c1 | 2014-02-06 17:06:12 -0800 | [diff] [blame] | 1023 | } |
Eric Anholt | 5b226ad | 2012-05-21 09:30:35 -0700 | [diff] [blame] | 1024 | } |