Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2012 Intel Corporation |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice (including the next |
| 12 | * paragraph) shall be included in all copies or substantial portions of the |
| 13 | * Software. |
| 14 | * |
| 15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 | * IN THE SOFTWARE. |
| 22 | */ |
| 23 | |
Eric Anholt | 185b5a5 | 2013-06-18 13:52:03 -0700 | [diff] [blame] | 24 | #include <errno.h> |
Eric Anholt | db31bc5 | 2013-02-07 18:46:18 -0800 | [diff] [blame] | 25 | #include "intel_batchbuffer.h" |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 26 | #include "intel_fbo.h" |
| 27 | |
| 28 | #include "brw_blorp.h" |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 29 | #include "brw_compiler.h" |
| 30 | #include "brw_nir.h" |
Kenneth Graunke | 6f7c41d | 2013-09-30 18:11:03 -0700 | [diff] [blame] | 31 | #include "brw_state.h" |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 32 | |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 33 | #define FILE_DEBUG_FLAG DEBUG_BLORP |
| 34 | |
/**
 * Fill in a brw_blorp_surface_info for one (level, layer) slice of a miptree.
 *
 * Translates the miptree into ISL terms (main surface, optional auxiliary
 * MCS surface), records the slice's physical dimensions, and picks a
 * hardware surface format suitable for either texturing from or rendering
 * to the slice.
 *
 * \param format  May be MESA_FORMAT_NONE, in which case the miptree's own
 *                format is used.  Certain depth/stencil formats are remapped
 *                to color formats because blorp renders to them as color.
 * \param is_render_target  Selects between render-target and texture format
 *                tables for the default case.
 */
void
brw_blorp_surface_info_init(struct brw_context *brw,
                            struct brw_blorp_surface_info *info,
                            struct intel_mipmap_tree *mt,
                            unsigned int level, unsigned int layer,
                            mesa_format format, bool is_render_target)
{
   /* Layer is a physical layer, so if this is a 2D multisample array texture
    * using INTEL_MSAA_LAYOUT_UMS or INTEL_MSAA_LAYOUT_CMS, then it had better
    * be a multiple of num_samples.
    */
   if (mt->msaa_layout == INTEL_MSAA_LAYOUT_UMS ||
       mt->msaa_layout == INTEL_MSAA_LAYOUT_CMS) {
      assert(mt->num_samples <= 1 || layer % mt->num_samples == 0);
   }

   intel_miptree_check_level_layer(mt, level, layer);

   info->mt = mt;

   /* Describe the main surface in ISL terms. */
   intel_miptree_get_isl_surf(brw, mt, &info->surf);

   /* Only surfaces with an MCS miptree carry auxiliary data here. */
   if (mt->mcs_mt) {
      intel_miptree_get_aux_isl_surf(brw, mt, &info->aux_surf,
                                     &info->aux_usage);
   } else {
      info->aux_usage = ISL_AUX_USAGE_NONE;
   }

   info->level = level;
   info->layer = layer;
   /* Physical (not logical) dimensions of the selected miplevel. */
   info->width = minify(mt->physical_width0, level - mt->first_level);
   info->height = minify(mt->physical_height0, level - mt->first_level);

   info->swizzle = SWIZZLE_XYZW;

   if (format == MESA_FORMAT_NONE)
      format = mt->format;

   switch (format) {
   case MESA_FORMAT_S_UINT8:
      /* Stencil is always W-tiled. */
      assert(info->surf.tiling == ISL_TILING_W);
      /* Prior to Broadwell, we can't render to R8_UINT */
      info->brw_surfaceformat = brw->gen >= 8 ? BRW_SURFACEFORMAT_R8_UINT :
                                                BRW_SURFACEFORMAT_R8_UNORM;
      break;
   case MESA_FORMAT_Z24_UNORM_X8_UINT:
      /* It would make sense to use BRW_SURFACEFORMAT_R24_UNORM_X8_TYPELESS
       * here, but unfortunately it isn't supported as a render target, which
       * would prevent us from blitting to 24-bit depth.
       *
       * The miptree consists of 32 bits per pixel, arranged as 24-bit depth
       * values interleaved with 8 "don't care" bits.  Since depth values don't
       * require any blending, it doesn't matter how we interpret the bit
       * pattern as long as we copy the right amount of data, so just map it
       * as 8-bit BGRA.
       */
      info->brw_surfaceformat = BRW_SURFACEFORMAT_B8G8R8A8_UNORM;
      break;
   case MESA_FORMAT_Z_FLOAT32:
      info->brw_surfaceformat = BRW_SURFACEFORMAT_R32_FLOAT;
      break;
   case MESA_FORMAT_Z_UNORM16:
      info->brw_surfaceformat = BRW_SURFACEFORMAT_R16_UNORM;
      break;
   default: {
      if (is_render_target) {
         assert(brw->format_supported_as_render_target[format]);
         info->brw_surfaceformat = brw->render_target_format[format];
      } else {
         info->brw_surfaceformat = brw_format_for_mesa_format(format);
      }
      break;
   }
   }

   /* Locate the slice inside the BO: the miptree gives a pixel offset,
    * which is split into a page-aligned byte offset plus an intratile
    * (x, y) offset in samples for SURFACE_STATE.
    */
   uint32_t x_offset, y_offset;
   intel_miptree_get_image_offset(mt, level, layer, &x_offset, &y_offset);

   uint8_t bs = isl_format_get_layout(info->brw_surfaceformat)->bpb / 8;
   isl_tiling_get_intratile_offset_el(&brw->isl_dev, info->surf.tiling, bs,
                                      info->surf.row_pitch, x_offset, y_offset,
                                      &info->bo_offset,
                                      &info->tile_x_sa, &info->tile_y_sa);
}
| 120 | |
Paul Berry | f04f219 | 2012-08-29 16:04:15 -0700 | [diff] [blame] | 121 | |
Jason Ekstrand | b3f08b5 | 2016-04-22 14:32:48 -0700 | [diff] [blame] | 122 | void |
| 123 | brw_blorp_params_init(struct brw_blorp_params *params) |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 124 | { |
Jason Ekstrand | b3f08b5 | 2016-04-22 14:32:48 -0700 | [diff] [blame] | 125 | memset(params, 0, sizeof(*params)); |
| 126 | params->hiz_op = GEN6_HIZ_OP_NONE; |
| 127 | params->fast_clear_op = 0; |
Jason Ekstrand | b3f08b5 | 2016-04-22 14:32:48 -0700 | [diff] [blame] | 128 | params->num_draw_buffers = 1; |
| 129 | params->num_layers = 1; |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 130 | } |
| 131 | |
Jason Ekstrand | c1fe885 | 2016-04-27 17:16:30 -0700 | [diff] [blame] | 132 | void |
| 133 | brw_blorp_init_wm_prog_key(struct brw_wm_prog_key *wm_key) |
| 134 | { |
| 135 | memset(wm_key, 0, sizeof(*wm_key)); |
| 136 | wm_key->nr_color_regions = 1; |
| 137 | for (int i = 0; i < MAX_SAMPLERS; i++) |
| 138 | wm_key->tex.swizzles[i] = SWIZZLE_XYZW; |
| 139 | } |
| 140 | |
/* Size callback for nir_lower_io: BLORP uniforms are restricted to 32-bit
 * scalars and vectors, so the size is simply 4 bytes per component.
 */
static int
nir_uniform_type_size(const struct glsl_type *type)
{
   /* Only very basic types are allowed */
   assert(glsl_type_is_vector_or_scalar(type));
   assert(glsl_get_bit_size(type) == 32);

   const int components = glsl_get_vector_elements(type);
   return components * 4;
}
| 150 | |
/**
 * Compile a hand-built NIR fragment shader for BLORP and fill in the
 * blorp-specific program data from the resulting brw_wm_prog_data.
 *
 * Ownership: this function takes ownership of \p nir (it is ralloc_steal'd
 * into an internal context and may be replaced entirely by
 * brw_preprocess_nir).  The returned assembly is allocated out of that same
 * internal context.  NOTE(review): mem_ctx is never freed here, so the
 * assembly stays valid for the caller — confirm the caller copies/uploads it
 * and that the context is not expected to be released in this function.
 *
 * \param use_repclear  Enables the replicated-data clear fast path in the
 *                      FS compiler.
 * \return the compiled shader assembly; *program_size receives its size.
 */
const unsigned *
brw_blorp_compile_nir_shader(struct brw_context *brw, struct nir_shader *nir,
                             const struct brw_wm_prog_key *wm_key,
                             bool use_repclear,
                             struct brw_blorp_prog_data *prog_data,
                             unsigned *program_size)
{
   const struct brw_compiler *compiler = brw->intelScreen->compiler;

   void *mem_ctx = ralloc_context(NULL);

   /* Calling brw_preprocess_nir and friends is destructive and, if cloning is
    * enabled, may end up completely replacing the nir_shader.  Therefore, we
    * own it and might as well put it in our context for easy cleanup.
    */
   ralloc_steal(mem_ctx, nir);
   nir->options =
      compiler->glsl_compiler_options[MESA_SHADER_FRAGMENT].NirOptions;

   struct brw_wm_prog_data wm_prog_data;
   memset(&wm_prog_data, 0, sizeof(wm_prog_data));

   /* BLORP shaders take no push constants. */
   wm_prog_data.base.nr_params = 0;
   wm_prog_data.base.param = NULL;

   /* BLORP always just uses the first two binding table entries */
   wm_prog_data.binding_table.render_target_start = 0;
   wm_prog_data.base.binding_table.texture_start = 1;

   nir = brw_preprocess_nir(compiler, nir);
   nir_remove_dead_variables(nir, nir_var_shader_in);
   nir_shader_gather_info(nir, nir_shader_get_entrypoint(nir)->impl);

   /* Uniforms are required to be lowered before going into compile_fs. For
    * BLORP, we'll assume that whoever builds the shader sets the location
    * they want so we just need to lower them and figure out how many we have
    * in total.
    */
   nir->num_uniforms = 0;
   nir_foreach_variable(var, &nir->uniforms) {
      var->data.driver_location = var->data.location;
      unsigned end = var->data.location + nir_uniform_type_size(var->type);
      nir->num_uniforms = MAX2(nir->num_uniforms, end);
   }
   nir_lower_io(nir, nir_var_uniform, nir_uniform_type_size);

   const unsigned *program =
      brw_compile_fs(compiler, brw, mem_ctx, wm_key, &wm_prog_data, nir,
                     NULL, -1, -1, false, use_repclear, program_size, NULL);

   /* Copy the relevant bits of wm_prog_data over into the blorp prog data */
   prog_data->dispatch_8 = wm_prog_data.dispatch_8;
   prog_data->dispatch_16 = wm_prog_data.dispatch_16;
   prog_data->first_curbe_grf_0 = wm_prog_data.base.dispatch_grf_start_reg;
   prog_data->first_curbe_grf_2 = wm_prog_data.dispatch_grf_start_reg_2;
   prog_data->ksp_offset_2 = wm_prog_data.prog_offset_2;
   prog_data->persample_msaa_dispatch = wm_prog_data.persample_dispatch;
   prog_data->flat_inputs = wm_prog_data.flat_inputs;
   prog_data->num_varying_inputs = wm_prog_data.num_varying_inputs;
   prog_data->inputs_read = nir->info.inputs_read;

   /* Sanity check: the compiler must not have invented push constants. */
   assert(wm_prog_data.base.nr_params == 0);

   return program;
}
| 216 | |
/* Per-generation layout parameters for RENDER_SURFACE_STATE, indexed by
 * brw->gen in the table below.
 */
struct surface_state_info {
   unsigned num_dwords;   /* size of RENDER_SURFACE_STATE in DWords */
   unsigned ss_align; /* Required alignment of RENDER_SURFACE_STATE in bytes */
   unsigned reloc_dw;     /* DWord holding the surface base address */
   unsigned aux_reloc_dw; /* DWord holding the auxiliary (MCS) address */
   unsigned tex_mocs;     /* MOCS value when used as a texture */
   unsigned rb_mocs;      /* MOCS value when used as a render target */
};

static const struct surface_state_info surface_state_infos[] = {
   /* The gen6 entry deliberately leaves the MOCS fields implicitly
    * zero-initialized: Sandy Bridge surface state has no MOCS.
    */
   [6] = {6,  32, 1,  0},
   [7] = {8,  32, 1,  6,  GEN7_MOCS_L3, GEN7_MOCS_L3},
   [8] = {13, 64, 8,  10, BDW_MOCS_WB,  BDW_MOCS_PTE},
   [9] = {16, 64, 8,  10, SKL_MOCS_WB,  SKL_MOCS_PTE},
};
| 232 | |
/**
 * Emit a RENDER_SURFACE_STATE into the batch's state area for a blorp
 * surface and emit the relocations for its buffer (and MCS buffer, if any).
 *
 * The ISL surface from \p surface is copied and flattened into a
 * single-level, single-slice 2D surface before filling the state.
 *
 * \return the batch-state offset of the emitted surface state, suitable for
 *         use in a binding table.
 */
uint32_t
brw_blorp_emit_surface_state(struct brw_context *brw,
                             const struct brw_blorp_surface_info *surface,
                             uint32_t read_domains, uint32_t write_domain,
                             bool is_render_target)
{
   const struct surface_state_info ss_info = surface_state_infos[brw->gen];

   /* Local copy: we stomp several fields below without touching the
    * caller's surface description.
    */
   struct isl_surf surf = surface->surf;

   /* Stomp surface dimensions and tiling (if needed) with info from blorp */
   surf.dim = ISL_SURF_DIM_2D;
   surf.dim_layout = ISL_DIM_LAYOUT_GEN4_2D;
   surf.logical_level0_px.width = surface->width;
   surf.logical_level0_px.height = surface->height;
   surf.logical_level0_px.depth = 1;
   surf.logical_level0_px.array_len = 1;
   surf.levels = 1;

   /* Alignment doesn't matter since we have 1 miplevel and 1 array slice so
    * just pick something that works for everybody.
    */
   surf.image_alignment_el = isl_extent3d(4, 4, 1);

   if (brw->gen == 6 && surf.samples > 1) {
      /* Since gen6 uses INTEL_MSAA_LAYOUT_IMS, width and height are measured
       * in samples.  But SURFACE_STATE wants them in pixels, so we need to
       * divide them each by 2.
       */
      surf.logical_level0_px.width /= 2;
      surf.logical_level0_px.height /= 2;
   }

   if (brw->gen == 6 && surf.image_alignment_el.height > 4) {
      /* This can happen on stencil buffers on Sandy Bridge due to the
       * single-LOD work-around.  It's fairly harmless as long as we don't
       * pass a bogus value into isl_surf_fill_state().
       */
      surf.image_alignment_el = isl_extent3d(4, 2, 1);
   }

   union isl_color_value clear_color = { .u32 = { 0, 0, 0, 0 } };

   const struct isl_surf *aux_surf = NULL;
   uint64_t aux_offset = 0;
   if (surface->mt->mcs_mt) {
      aux_surf = &surface->aux_surf;
      assert(surface->mt->mcs_mt->offset == 0);
      aux_offset = surface->mt->mcs_mt->bo->offset64;

      /* We only really need a clear color if we also have an auxiliary
       * surface.  Without one, it does nothing.
       */
      clear_color = intel_miptree_get_isl_clear_color(brw, surface->mt);
   }

   /* Single-slice view with an identity channel swizzle. */
   struct isl_view view = {
      .format = surface->brw_surfaceformat,
      .base_level = 0,
      .levels = 1,
      .base_array_layer = 0,
      .array_len = 1,
      .channel_select = {
         ISL_CHANNEL_SELECT_RED,
         ISL_CHANNEL_SELECT_GREEN,
         ISL_CHANNEL_SELECT_BLUE,
         ISL_CHANNEL_SELECT_ALPHA,
      },
      .usage = is_render_target ? ISL_SURF_USAGE_RENDER_TARGET_BIT :
                                  ISL_SURF_USAGE_TEXTURE_BIT,
   };

   uint32_t surf_offset;
   uint32_t *dw = brw_state_batch(brw, AUB_TRACE_SURFACE_STATE,
                                  ss_info.num_dwords * 4, ss_info.ss_align,
                                  &surf_offset);

   const uint32_t mocs = is_render_target ? ss_info.rb_mocs : ss_info.tex_mocs;

   isl_surf_fill_state(&brw->isl_dev, dw, .surf = &surf, .view = &view,
                       .address = surface->mt->bo->offset64 + surface->bo_offset,
                       .aux_surf = aux_surf, .aux_usage = surface->aux_usage,
                       .aux_address = aux_offset,
                       .mocs = mocs, .clear_color = clear_color,
                       .x_offset_sa = surface->tile_x_sa,
                       .y_offset_sa = surface->tile_y_sa);

   /* Emit relocation to surface contents */
   drm_intel_bo_emit_reloc(brw->batch.bo,
                           surf_offset + ss_info.reloc_dw * 4,
                           surface->mt->bo,
                           dw[ss_info.reloc_dw] - surface->mt->bo->offset64,
                           read_domains, write_domain);

   if (aux_surf) {
      /* On gen7 and prior, the bottom 12 bits of the MCS base address are
       * used to store other information.  This should be ok, however, because
       * surface buffer addresses are always 4K page aligned.
       */
      assert((aux_offset & 0xfff) == 0);
      drm_intel_bo_emit_reloc(brw->batch.bo,
                              surf_offset + ss_info.aux_reloc_dw * 4,
                              surface->mt->mcs_mt->bo,
                              dw[ss_info.aux_reloc_dw] & 0xfff,
                              read_domains, write_domain);
   }

   return surf_offset;
}
| 342 | |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 343 | /** |
| 344 | * Perform a HiZ or depth resolve operation. |
| 345 | * |
| 346 | * For an overview of HiZ ops, see the following sections of the Sandy Bridge |
| 347 | * PRM, Volume 1, Part 2: |
| 348 | * - 7.5.3.1 Depth Buffer Clear |
| 349 | * - 7.5.3.2 Depth Buffer Resolve |
| 350 | * - 7.5.3.3 Hierarchical Depth Buffer Resolve |
| 351 | */ |
Eric Anholt | 5b226ad | 2012-05-21 09:30:35 -0700 | [diff] [blame] | 352 | void |
Kenneth Graunke | ca43757 | 2013-07-02 23:17:14 -0700 | [diff] [blame] | 353 | intel_hiz_exec(struct brw_context *brw, struct intel_mipmap_tree *mt, |
Jason Ekstrand | bed7429 | 2016-04-22 16:04:05 -0700 | [diff] [blame] | 354 | unsigned int level, unsigned int layer, enum gen6_hiz_op op) |
Eric Anholt | 5b226ad | 2012-05-21 09:30:35 -0700 | [diff] [blame] | 355 | { |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 356 | const char *opname = NULL; |
| 357 | |
| 358 | switch (op) { |
| 359 | case GEN6_HIZ_OP_DEPTH_RESOLVE: |
| 360 | opname = "depth resolve"; |
| 361 | break; |
| 362 | case GEN6_HIZ_OP_HIZ_RESOLVE: |
| 363 | opname = "hiz ambiguate"; |
| 364 | break; |
| 365 | case GEN6_HIZ_OP_DEPTH_CLEAR: |
| 366 | opname = "depth clear"; |
| 367 | break; |
| 368 | case GEN6_HIZ_OP_NONE: |
| 369 | opname = "noop?"; |
| 370 | break; |
| 371 | } |
| 372 | |
| 373 | DBG("%s %s to mt %p level %d layer %d\n", |
Marius Predut | 28d9e90 | 2015-04-07 22:05:28 +0300 | [diff] [blame] | 374 | __func__, opname, mt, level, layer); |
Eric Anholt | a2ca98b | 2013-05-30 14:53:55 -0700 | [diff] [blame] | 375 | |
Kenneth Graunke | 8cad1c1 | 2014-02-06 17:06:12 -0800 | [diff] [blame] | 376 | if (brw->gen >= 8) { |
| 377 | gen8_hiz_exec(brw, mt, level, layer, op); |
| 378 | } else { |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 379 | gen6_blorp_hiz_exec(brw, mt, level, layer, op); |
Kenneth Graunke | 8cad1c1 | 2014-02-06 17:06:12 -0800 | [diff] [blame] | 380 | } |
Eric Anholt | 5b226ad | 2012-05-21 09:30:35 -0700 | [diff] [blame] | 381 | } |
| 382 | |
/**
 * Execute a blorp operation, handling cache flushes, batch-space
 * reservation, and the retry-on-aperture-overflow protocol.
 *
 * The whole operation must fit in the current batch (blorp state cannot be
 * split across a batch wrap), so we reserve a worst-case estimate up front,
 * snapshot the batch, and assert afterwards that we stayed within it.
 */
void
brw_blorp_exec(struct brw_context *brw, const struct brw_blorp_params *params)
{
   /* NOTE(review): |ctx| looks unused here, but the WARN_ONCE macro below
    * presumably expands to code that references it — confirm before removing.
    */
   struct gl_context *ctx = &brw->ctx;
   /* Worst-case batch usage of one blorp op, in bytes; gen8+ emits more
    * state.  Verified by the assert after the switch below.
    */
   const uint32_t estimated_max_batch_usage = brw->gen >= 8 ? 1800 : 1500;
   bool check_aperture_failed_once = false;

   /* Flush the sampler and render caches.  We definitely need to flush the
    * sampler cache so that we get updated contents from the render cache for
    * the glBlitFramebuffer() source.  Also, we are sometimes warned in the
    * docs to flush the cache between reinterpretations of the same surface
    * data with different formats, which blorp does for stencil and depth
    * data.
    */
   brw_emit_mi_flush(brw);

   brw_select_pipeline(brw, BRW_RENDER_PIPELINE);

retry:
   intel_batchbuffer_require_space(brw, estimated_max_batch_usage, RENDER_RING);
   intel_batchbuffer_save_state(brw);
   /* Snapshot batch position so we can verify our usage estimate below. */
   drm_intel_bo *saved_bo = brw->batch.bo;
   uint32_t saved_used = USED_BATCH(brw->batch);
   uint32_t saved_state_batch_offset = brw->batch.state_batch_offset;

   switch (brw->gen) {
   case 6:
      gen6_blorp_exec(brw, params);
      break;
   case 7:
      gen7_blorp_exec(brw, params);
      break;
   case 8:
   case 9:
      gen8_blorp_exec(brw, params);
      break;
   default:
      /* BLORP is not supported before Gen6. */
      unreachable("not reached");
   }

   /* Make sure we didn't wrap the batch unintentionally, and make sure we
    * reserved enough space that a wrap will never happen.
    */
   assert(brw->batch.bo == saved_bo);
   assert((USED_BATCH(brw->batch) - saved_used) * 4 +
          (saved_state_batch_offset - brw->batch.state_batch_offset) <
          estimated_max_batch_usage);
   /* Shut up compiler warnings on release build */
   (void)saved_bo;
   (void)saved_used;
   (void)saved_state_batch_offset;

   /* Check if the blorp op we just did would make our batch likely to fail to
    * map all the BOs into the GPU at batch exec time later.  If so, flush the
    * batch and try again with nothing else in the batch.
    */
   if (dri_bufmgr_check_aperture_space(&brw->batch.bo, 1)) {
      if (!check_aperture_failed_once) {
         /* First failure: retry once with an empty batch. */
         check_aperture_failed_once = true;
         intel_batchbuffer_reset_to_saved(brw);
         intel_batchbuffer_flush(brw);
         goto retry;
      } else {
         /* Second failure: submit anyway and warn once. */
         int ret = intel_batchbuffer_flush(brw);
         WARN_ONCE(ret == -ENOSPC,
                   "i965: blorp emit exceeded available aperture space\n");
      }
   }

   if (unlikely(brw->always_flush_batch))
      intel_batchbuffer_flush(brw);

   /* We've smashed all state compared to what the normal 3D pipeline
    * rendering tracks for GL.
    */
   brw->ctx.NewDriverState |= BRW_NEW_BLORP;
   brw->no_depth_or_stencil = false;
   brw->ib.type = -1;

   /* Flush the sampler cache so any texturing from the destination is
    * coherent.
    */
   brw_emit_mi_flush(brw);
}
| 468 | |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 469 | void |
| 470 | gen6_blorp_hiz_exec(struct brw_context *brw, struct intel_mipmap_tree *mt, |
| 471 | unsigned int level, unsigned int layer, enum gen6_hiz_op op) |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 472 | { |
Jason Ekstrand | b3f08b5 | 2016-04-22 14:32:48 -0700 | [diff] [blame] | 473 | struct brw_blorp_params params; |
| 474 | brw_blorp_params_init(¶ms); |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 475 | |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 476 | params.hiz_op = op; |
| 477 | |
Jason Ekstrand | b6dd8e4 | 2016-04-21 16:39:56 -0700 | [diff] [blame] | 478 | brw_blorp_surface_info_init(brw, ¶ms.depth, mt, level, layer, |
| 479 | mt->format, true); |
Chad Versace | a14dc4f | 2013-03-11 19:21:46 -0700 | [diff] [blame] | 480 | |
| 481 | /* Align the rectangle primitive to 8x4 pixels. |
| 482 | * |
| 483 | * During fast depth clears, the emitted rectangle primitive must be |
| 484 | * aligned to 8x4 pixels. From the Ivybridge PRM, Vol 2 Part 1 Section |
| 485 | * 11.5.3.1 Depth Buffer Clear (and the matching section in the Sandybridge |
| 486 | * PRM): |
| 487 | * If Number of Multisamples is NUMSAMPLES_1, the rectangle must be |
| 488 | * aligned to an 8x4 pixel block relative to the upper left corner |
| 489 | * of the depth buffer [...] |
| 490 | * |
| 491 | * For hiz resolves, the rectangle must also be 8x4 aligned. Item |
| 492 | * WaHizAmbiguate8x4Aligned from the Haswell workarounds page and the |
| 493 | * Ivybridge simulator require the alignment. |
| 494 | * |
| 495 | * To be safe, let's just align the rect for all hiz operations and all |
| 496 | * hardware generations. |
| 497 | * |
| 498 | * However, for some miptree slices of a Z24 texture, emitting an 8x4 |
| 499 | * aligned rectangle that covers the slice may clobber adjacent slices if |
| 500 | * we strictly adhered to the texture alignments specified in the PRM. The |
| 501 | * Ivybridge PRM, Section "Alignment Unit Size", states that |
| 502 | * SURFACE_STATE.Surface_Horizontal_Alignment should be 4 for Z24 surfaces, |
| 503 | * not 8. But commit 1f112cc increased the alignment from 4 to 8, which |
| 504 | * prevents the clobbering. |
| 505 | */ |
Jason Ekstrand | 28b0ad8 | 2016-06-23 11:00:59 -0700 | [diff] [blame] | 506 | params.dst.surf.samples = MAX2(mt->num_samples, 1); |
| 507 | if (params.depth.surf.samples > 1) { |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 508 | params.depth.width = ALIGN(mt->logical_width0, 8); |
| 509 | params.depth.height = ALIGN(mt->logical_height0, 4); |
Chris Forbes | 43d23e8 | 2014-11-18 21:49:53 +1300 | [diff] [blame] | 510 | } else { |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 511 | params.depth.width = ALIGN(params.depth.width, 8); |
| 512 | params.depth.height = ALIGN(params.depth.height, 4); |
Chris Forbes | 43d23e8 | 2014-11-18 21:49:53 +1300 | [diff] [blame] | 513 | } |
Chad Versace | a14dc4f | 2013-03-11 19:21:46 -0700 | [diff] [blame] | 514 | |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 515 | params.x1 = params.depth.width; |
| 516 | params.y1 = params.depth.height; |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 517 | |
Eric Anholt | 11bef60 | 2014-04-23 14:21:21 -0700 | [diff] [blame] | 518 | assert(intel_miptree_level_has_hiz(mt, level)); |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 519 | |
| 520 | switch (mt->format) { |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 521 | case MESA_FORMAT_Z_UNORM16: |
| 522 | params.depth_format = BRW_DEPTHFORMAT_D16_UNORM; |
| 523 | break; |
| 524 | case MESA_FORMAT_Z_FLOAT32: |
| 525 | params.depth_format = BRW_DEPTHFORMAT_D32_FLOAT; |
| 526 | break; |
| 527 | case MESA_FORMAT_Z24_UNORM_X8_UINT: |
| 528 | params.depth_format = BRW_DEPTHFORMAT_D24_UNORM_X8_UINT; |
| 529 | break; |
| 530 | default: |
| 531 | unreachable("not reached"); |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 532 | } |
Jason Ekstrand | 8096ed7 | 2016-04-22 13:46:25 -0700 | [diff] [blame] | 533 | |
| 534 | brw_blorp_exec(brw, ¶ms); |
Paul Berry | 2c5510b | 2012-04-29 22:00:46 -0700 | [diff] [blame] | 535 | } |