Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 1 | /* |
Daniele Castagna | 7a755de | 2016-12-16 17:32:30 -0500 | [diff] [blame] | 2 | * Copyright 2014 The Chromium OS Authors. All rights reserved. |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 3 | * Use of this source code is governed by a BSD-style license that can be |
| 4 | * found in the LICENSE file. |
| 5 | */ |
| 6 | |
Gurchetan Singh | 46faf6b | 2016-08-05 14:40:07 -0700 | [diff] [blame] | 7 | #ifdef DRV_TEGRA |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 8 | |
Joe Kniss | 5f61c79 | 2017-06-28 14:06:24 -0700 | [diff] [blame] | 9 | #include <assert.h> |
Stéphane Marchesin | 6ac299f | 2019-03-21 12:23:29 -0700 | [diff] [blame] | 10 | #include <errno.h> |
Ilja H. Friedel | f9d2ab7 | 2015-04-09 14:08:36 -0700 | [diff] [blame] | 11 | #include <stdio.h> |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 12 | #include <string.h> |
Gurchetan Singh | ef92053 | 2016-08-12 16:38:25 -0700 | [diff] [blame] | 13 | #include <sys/mman.h> |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 14 | #include <tegra_drm.h> |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 15 | #include <xf86drm.h> |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 16 | |
Gurchetan Singh | 46faf6b | 2016-08-05 14:40:07 -0700 | [diff] [blame] | 17 | #include "drv_priv.h" |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 18 | #include "helpers.h" |
Yuly Novikov | 96c7a3b | 2015-12-08 22:48:29 -0500 | [diff] [blame] | 19 | #include "util.h" |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 20 | |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 21 | /* |
| 22 | * GOB (Group Of Bytes) is the basic unit of the blocklinear layout. |
| 23 | * GOBs are arranged to blocks, where the height of the block (measured |
| 24 | * in GOBs) is configurable. |
| 25 | */ |
| 26 | #define NV_BLOCKLINEAR_GOB_HEIGHT 8 |
| 27 | #define NV_BLOCKLINEAR_GOB_WIDTH 64 |
| 28 | #define NV_DEFAULT_BLOCK_HEIGHT_LOG2 4 |
| 29 | #define NV_PREFERRED_PAGE_SIZE (128 * 1024) |
| 30 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 31 | // clang-format off |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 32 | enum nv_mem_kind |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 33 | { |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 34 | NV_MEM_KIND_PITCH = 0, |
Vince Hsu | 0fd1142 | 2016-05-19 17:46:08 +0800 | [diff] [blame] | 35 | NV_MEM_KIND_C32_2CRA = 0xdb, |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 36 | NV_MEM_KIND_GENERIC_16Bx2 = 0xfe, |
| 37 | }; |
| 38 | |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 39 | enum tegra_map_type { |
| 40 | TEGRA_READ_TILED_BUFFER = 0, |
| 41 | TEGRA_WRITE_TILED_BUFFER = 1, |
| 42 | }; |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 43 | // clang-format on |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 44 | |
/*
 * Per-mapping state for blocklinear buffers: "tiled" is the real mmap of
 * the bo (hardware layout), "untiled" is a heap-allocated linear shadow
 * copy handed to the client in its place.
 */
struct tegra_private_map_data {
	void *tiled;   /* mmap'd blocklinear buffer */
	void *untiled; /* linear shadow exposed to the CPU */
};
| 49 | |
Gurchetan Singh | 8ac0c9a | 2017-05-15 09:34:22 -0700 | [diff] [blame] | 50 | static const uint32_t render_target_formats[] = { DRM_FORMAT_ARGB8888, DRM_FORMAT_XRGB8888 }; |
Gurchetan Singh | 179687e | 2016-10-28 10:07:35 -0700 | [diff] [blame] | 51 | |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 52 | static int compute_block_height_log2(int height) |
| 53 | { |
| 54 | int block_height_log2 = NV_DEFAULT_BLOCK_HEIGHT_LOG2; |
| 55 | |
| 56 | if (block_height_log2 > 0) { |
| 57 | /* Shrink, if a smaller block height could cover the whole |
| 58 | * surface height. */ |
| 59 | int proposed = NV_BLOCKLINEAR_GOB_HEIGHT << (block_height_log2 - 1); |
| 60 | while (proposed >= height) { |
| 61 | block_height_log2--; |
| 62 | if (block_height_log2 == 0) |
| 63 | break; |
| 64 | proposed /= 2; |
| 65 | } |
| 66 | } |
| 67 | return block_height_log2; |
| 68 | } |
| 69 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 70 | static void compute_layout_blocklinear(int width, int height, int format, enum nv_mem_kind *kind, |
| 71 | uint32_t *block_height_log2, uint32_t *stride, |
| 72 | uint32_t *size) |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 73 | { |
Gurchetan Singh | 83dc4fb | 2016-07-19 15:52:33 -0700 | [diff] [blame] | 74 | int pitch = drv_stride_from_format(format, width, 0); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 75 | |
| 76 | /* Align to blocklinear blocks. */ |
Yuly Novikov | 96c7a3b | 2015-12-08 22:48:29 -0500 | [diff] [blame] | 77 | pitch = ALIGN(pitch, NV_BLOCKLINEAR_GOB_WIDTH); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 78 | |
| 79 | /* Compute padded height. */ |
| 80 | *block_height_log2 = compute_block_height_log2(height); |
| 81 | int block_height = 1 << *block_height_log2; |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 82 | int padded_height = ALIGN(height, NV_BLOCKLINEAR_GOB_HEIGHT * block_height); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 83 | |
| 84 | int bytes = pitch * padded_height; |
| 85 | |
| 86 | /* Pad the allocation to the preferred page size. |
| 87 | * This will reduce the required page table size (see discussion in NV |
| 88 | * bug 1321091), and also acts as a WAR for NV bug 1325421. |
| 89 | */ |
Yuly Novikov | 96c7a3b | 2015-12-08 22:48:29 -0500 | [diff] [blame] | 90 | bytes = ALIGN(bytes, NV_PREFERRED_PAGE_SIZE); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 91 | |
Vince Hsu | 0fd1142 | 2016-05-19 17:46:08 +0800 | [diff] [blame] | 92 | *kind = NV_MEM_KIND_C32_2CRA; |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 93 | *stride = pitch; |
| 94 | *size = bytes; |
| 95 | } |
| 96 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 97 | static void compute_layout_linear(int width, int height, int format, uint32_t *stride, |
| 98 | uint32_t *size) |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 99 | { |
Gurchetan Singh | 5972eec | 2016-12-16 15:51:46 -0800 | [diff] [blame] | 100 | *stride = ALIGN(drv_stride_from_format(format, width, 0), 64); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 101 | *size = *stride * height; |
| 102 | } |
| 103 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 104 | static void transfer_tile(struct bo *bo, uint8_t *tiled, uint8_t *untiled, enum tegra_map_type type, |
| 105 | uint32_t bytes_per_pixel, uint32_t gob_top, uint32_t gob_left, |
Joe Kniss | 6570585 | 2017-06-29 15:02:46 -0700 | [diff] [blame] | 106 | uint32_t gob_size_pixels, uint8_t *tiled_last) |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 107 | { |
| 108 | uint8_t *tmp; |
| 109 | uint32_t x, y, k; |
| 110 | for (k = 0; k < gob_size_pixels; k++) { |
| 111 | /* |
| 112 | * Given the kth pixel starting from the tile specified by |
| 113 | * gob_top and gob_left, unswizzle to get the standard (x, y) |
| 114 | * representation. |
| 115 | */ |
| 116 | x = gob_left + (((k >> 3) & 8) | ((k >> 1) & 4) | (k & 3)); |
| 117 | y = gob_top + ((k >> 7 << 3) | ((k >> 3) & 6) | ((k >> 2) & 1)); |
| 118 | |
Joe Kniss | 6570585 | 2017-06-29 15:02:46 -0700 | [diff] [blame] | 119 | if (tiled >= tiled_last) |
| 120 | return; |
| 121 | |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 122 | if (x >= bo->meta.width || y >= bo->meta.height) { |
Joe Kniss | 6570585 | 2017-06-29 15:02:46 -0700 | [diff] [blame] | 123 | tiled += bytes_per_pixel; |
| 124 | continue; |
| 125 | } |
| 126 | |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 127 | tmp = untiled + y * bo->meta.strides[0] + x * bytes_per_pixel; |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 128 | |
| 129 | if (type == TEGRA_READ_TILED_BUFFER) |
| 130 | memcpy(tmp, tiled, bytes_per_pixel); |
| 131 | else if (type == TEGRA_WRITE_TILED_BUFFER) |
| 132 | memcpy(tiled, tmp, bytes_per_pixel); |
| 133 | |
| 134 | /* Move on to next pixel. */ |
| 135 | tiled += bytes_per_pixel; |
| 136 | } |
| 137 | } |
| 138 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 139 | static void transfer_tiled_memory(struct bo *bo, uint8_t *tiled, uint8_t *untiled, |
| 140 | enum tegra_map_type type) |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 141 | { |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 142 | uint32_t gob_width, gob_height, gob_size_bytes, gob_size_pixels, gob_count_x, gob_count_y, |
| 143 | gob_top, gob_left; |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 144 | uint32_t i, j, offset; |
Joe Kniss | 6570585 | 2017-06-29 15:02:46 -0700 | [diff] [blame] | 145 | uint8_t *tmp, *tiled_last; |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 146 | uint32_t bytes_per_pixel = drv_stride_from_format(bo->meta.format, 1, 0); |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 147 | |
| 148 | /* |
| 149 | * The blocklinear format consists of 8*(2^n) x 64 byte sized tiles, |
| 150 | * where 0 <= n <= 4. |
| 151 | */ |
| 152 | gob_width = DIV_ROUND_UP(NV_BLOCKLINEAR_GOB_WIDTH, bytes_per_pixel); |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 153 | gob_height = NV_BLOCKLINEAR_GOB_HEIGHT * (1 << NV_DEFAULT_BLOCK_HEIGHT_LOG2); |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 154 | /* Calculate the height from maximum possible gob height */ |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 155 | while (gob_height > NV_BLOCKLINEAR_GOB_HEIGHT && gob_height >= 2 * bo->meta.height) |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 156 | gob_height /= 2; |
| 157 | |
| 158 | gob_size_bytes = gob_height * NV_BLOCKLINEAR_GOB_WIDTH; |
| 159 | gob_size_pixels = gob_height * gob_width; |
| 160 | |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 161 | gob_count_x = DIV_ROUND_UP(bo->meta.strides[0], NV_BLOCKLINEAR_GOB_WIDTH); |
| 162 | gob_count_y = DIV_ROUND_UP(bo->meta.height, gob_height); |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 163 | |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 164 | tiled_last = tiled + bo->meta.total_size; |
Joe Kniss | 6570585 | 2017-06-29 15:02:46 -0700 | [diff] [blame] | 165 | |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 166 | offset = 0; |
| 167 | for (j = 0; j < gob_count_y; j++) { |
| 168 | gob_top = j * gob_height; |
| 169 | for (i = 0; i < gob_count_x; i++) { |
| 170 | tmp = tiled + offset; |
| 171 | gob_left = i * gob_width; |
| 172 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 173 | transfer_tile(bo, tmp, untiled, type, bytes_per_pixel, gob_top, gob_left, |
Joe Kniss | 6570585 | 2017-06-29 15:02:46 -0700 | [diff] [blame] | 174 | gob_size_pixels, tiled_last); |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 175 | |
| 176 | offset += gob_size_bytes; |
| 177 | } |
| 178 | } |
| 179 | } |
| 180 | |
Gurchetan Singh | 179687e | 2016-10-28 10:07:35 -0700 | [diff] [blame] | 181 | static int tegra_init(struct driver *drv) |
| 182 | { |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 183 | struct format_metadata metadata; |
Gurchetan Singh | a1892b2 | 2017-09-28 16:40:52 -0700 | [diff] [blame] | 184 | uint64_t use_flags = BO_USE_RENDER_MASK; |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 185 | |
| 186 | metadata.tiling = NV_MEM_KIND_PITCH; |
| 187 | metadata.priority = 1; |
Kristian H. Kristensen | bc8c593 | 2017-10-24 18:36:32 -0700 | [diff] [blame] | 188 | metadata.modifier = DRM_FORMAT_MOD_LINEAR; |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 189 | |
Gurchetan Singh | d300145 | 2017-11-03 17:18:36 -0700 | [diff] [blame] | 190 | drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats), |
| 191 | &metadata, use_flags); |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 192 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 193 | drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT); |
| 194 | drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT); |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 195 | |
Gurchetan Singh | a1892b2 | 2017-09-28 16:40:52 -0700 | [diff] [blame] | 196 | use_flags &= ~BO_USE_SW_WRITE_OFTEN; |
| 197 | use_flags &= ~BO_USE_SW_READ_OFTEN; |
| 198 | use_flags &= ~BO_USE_LINEAR; |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 199 | |
| 200 | metadata.tiling = NV_MEM_KIND_C32_2CRA; |
| 201 | metadata.priority = 2; |
| 202 | |
Gurchetan Singh | d300145 | 2017-11-03 17:18:36 -0700 | [diff] [blame] | 203 | drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats), |
| 204 | &metadata, use_flags); |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 205 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 206 | drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_SCANOUT); |
| 207 | drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_SCANOUT); |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 208 | return 0; |
Gurchetan Singh | 179687e | 2016-10-28 10:07:35 -0700 | [diff] [blame] | 209 | } |
| 210 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 211 | static int tegra_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format, |
Gurchetan Singh | a1892b2 | 2017-09-28 16:40:52 -0700 | [diff] [blame] | 212 | uint64_t use_flags) |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 213 | { |
| 214 | uint32_t size, stride, block_height_log2 = 0; |
| 215 | enum nv_mem_kind kind = NV_MEM_KIND_PITCH; |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 216 | struct drm_tegra_gem_create gem_create; |
| 217 | int ret; |
| 218 | |
Gurchetan Singh | a1892b2 | 2017-09-28 16:40:52 -0700 | [diff] [blame] | 219 | if (use_flags & |
| 220 | (BO_USE_CURSOR | BO_USE_LINEAR | BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN)) |
Gurchetan Singh | 6b41fb5 | 2017-03-01 20:14:39 -0800 | [diff] [blame] | 221 | compute_layout_linear(width, height, format, &stride, &size); |
| 222 | else |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 223 | compute_layout_blocklinear(width, height, format, &kind, &block_height_log2, |
| 224 | &stride, &size); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 225 | |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 226 | memset(&gem_create, 0, sizeof(gem_create)); |
| 227 | gem_create.size = size; |
| 228 | gem_create.flags = 0; |
| 229 | |
Gurchetan Singh | 46faf6b | 2016-08-05 14:40:07 -0700 | [diff] [blame] | 230 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_TEGRA_GEM_CREATE, &gem_create); |
Ilja H. Friedel | f9d2ab7 | 2015-04-09 14:08:36 -0700 | [diff] [blame] | 231 | if (ret) { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 232 | drv_log("DRM_IOCTL_TEGRA_GEM_CREATE failed (size=%zu)\n", size); |
Stéphane Marchesin | 6ac299f | 2019-03-21 12:23:29 -0700 | [diff] [blame] | 233 | return -errno; |
Ilja H. Friedel | f9d2ab7 | 2015-04-09 14:08:36 -0700 | [diff] [blame] | 234 | } |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 235 | |
Yuly Novikov | 96c7a3b | 2015-12-08 22:48:29 -0500 | [diff] [blame] | 236 | bo->handles[0].u32 = gem_create.handle; |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 237 | bo->meta.offsets[0] = 0; |
| 238 | bo->meta.total_size = bo->meta.sizes[0] = size; |
| 239 | bo->meta.strides[0] = stride; |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 240 | |
| 241 | if (kind != NV_MEM_KIND_PITCH) { |
| 242 | struct drm_tegra_gem_set_tiling gem_tile; |
| 243 | |
| 244 | memset(&gem_tile, 0, sizeof(gem_tile)); |
Yuly Novikov | 96c7a3b | 2015-12-08 22:48:29 -0500 | [diff] [blame] | 245 | gem_tile.handle = bo->handles[0].u32; |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 246 | gem_tile.mode = DRM_TEGRA_GEM_TILING_MODE_BLOCK; |
| 247 | gem_tile.value = block_height_log2; |
| 248 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 249 | ret = drmCommandWriteRead(bo->drv->fd, DRM_TEGRA_GEM_SET_TILING, &gem_tile, |
| 250 | sizeof(gem_tile)); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 251 | if (ret < 0) { |
Gurchetan Singh | 46faf6b | 2016-08-05 14:40:07 -0700 | [diff] [blame] | 252 | drv_gem_bo_destroy(bo); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 253 | return ret; |
| 254 | } |
| 255 | |
| 256 | /* Encode blocklinear parameters for EGLImage creation. */ |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 257 | bo->meta.tiling = (kind & 0xff) | ((block_height_log2 & 0xf) << 8); |
| 258 | bo->meta.format_modifiers[0] = fourcc_mod_code(NV, bo->meta.tiling); |
Lauri Peltonen | 7842d8f | 2014-12-17 23:01:37 -0800 | [diff] [blame] | 259 | } |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 260 | |
| 261 | return 0; |
| 262 | } |
| 263 | |
Gurchetan Singh | 7dcdff1 | 2017-09-14 13:04:11 -0700 | [diff] [blame] | 264 | static int tegra_bo_import(struct bo *bo, struct drv_import_fd_data *data) |
| 265 | { |
| 266 | int ret; |
| 267 | struct drm_tegra_gem_get_tiling gem_get_tiling; |
| 268 | |
| 269 | ret = drv_prime_bo_import(bo, data); |
| 270 | if (ret) |
| 271 | return ret; |
| 272 | |
| 273 | /* TODO(gsingh): export modifiers and get rid of backdoor tiling. */ |
| 274 | memset(&gem_get_tiling, 0, sizeof(gem_get_tiling)); |
| 275 | gem_get_tiling.handle = bo->handles[0].u32; |
| 276 | |
| 277 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_TEGRA_GEM_GET_TILING, &gem_get_tiling); |
| 278 | if (ret) { |
| 279 | drv_gem_bo_destroy(bo); |
Stéphane Marchesin | 6ac299f | 2019-03-21 12:23:29 -0700 | [diff] [blame] | 280 | return -errno; |
Gurchetan Singh | 7dcdff1 | 2017-09-14 13:04:11 -0700 | [diff] [blame] | 281 | } |
| 282 | |
| 283 | /* NOTE(djmk): we only know about one tiled format, so if our drmIoctl call tells us we are |
| 284 | tiled, assume it is this format (NV_MEM_KIND_C32_2CRA) otherwise linear (KIND_PITCH). */ |
| 285 | if (gem_get_tiling.mode == DRM_TEGRA_GEM_TILING_MODE_PITCH) { |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 286 | bo->meta.tiling = NV_MEM_KIND_PITCH; |
Gurchetan Singh | 7dcdff1 | 2017-09-14 13:04:11 -0700 | [diff] [blame] | 287 | } else if (gem_get_tiling.mode == DRM_TEGRA_GEM_TILING_MODE_BLOCK) { |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 288 | bo->meta.tiling = NV_MEM_KIND_C32_2CRA; |
Gurchetan Singh | 7dcdff1 | 2017-09-14 13:04:11 -0700 | [diff] [blame] | 289 | } else { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 290 | drv_log("%s: unknown tile format %d\n", __func__, gem_get_tiling.mode); |
Gurchetan Singh | 7dcdff1 | 2017-09-14 13:04:11 -0700 | [diff] [blame] | 291 | drv_gem_bo_destroy(bo); |
| 292 | assert(0); |
| 293 | } |
| 294 | |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 295 | bo->meta.format_modifiers[0] = fourcc_mod_code(NV, bo->meta.tiling); |
Gurchetan Singh | 7dcdff1 | 2017-09-14 13:04:11 -0700 | [diff] [blame] | 296 | return 0; |
| 297 | } |
| 298 | |
Gurchetan Singh | ee43c30 | 2017-11-14 18:20:27 -0800 | [diff] [blame] | 299 | static void *tegra_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags) |
Gurchetan Singh | ef92053 | 2016-08-12 16:38:25 -0700 | [diff] [blame] | 300 | { |
| 301 | int ret; |
| 302 | struct drm_tegra_gem_mmap gem_map; |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 303 | struct tegra_private_map_data *priv; |
Gurchetan Singh | ef92053 | 2016-08-12 16:38:25 -0700 | [diff] [blame] | 304 | |
| 305 | memset(&gem_map, 0, sizeof(gem_map)); |
| 306 | gem_map.handle = bo->handles[0].u32; |
| 307 | |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 308 | ret = drmCommandWriteRead(bo->drv->fd, DRM_TEGRA_GEM_MMAP, &gem_map, sizeof(gem_map)); |
Gurchetan Singh | ef92053 | 2016-08-12 16:38:25 -0700 | [diff] [blame] | 309 | if (ret < 0) { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 310 | drv_log("DRM_TEGRA_GEM_MMAP failed\n"); |
Gurchetan Singh | ef92053 | 2016-08-12 16:38:25 -0700 | [diff] [blame] | 311 | return MAP_FAILED; |
| 312 | } |
| 313 | |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 314 | void *addr = mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd, |
Gurchetan Singh | cfb8876 | 2017-09-28 17:14:50 -0700 | [diff] [blame] | 315 | gem_map.offset); |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 316 | vma->length = bo->meta.total_size; |
| 317 | if ((bo->meta.tiling & 0xFF) == NV_MEM_KIND_C32_2CRA && addr != MAP_FAILED) { |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 318 | priv = calloc(1, sizeof(*priv)); |
Gurchetan Singh | 298b757 | 2019-09-19 09:55:18 -0700 | [diff] [blame] | 319 | priv->untiled = calloc(1, bo->meta.total_size); |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 320 | priv->tiled = addr; |
Gurchetan Singh | ee43c30 | 2017-11-14 18:20:27 -0800 | [diff] [blame] | 321 | vma->priv = priv; |
Gurchetan Singh | 1b1d56a | 2017-03-10 16:25:23 -0800 | [diff] [blame] | 322 | transfer_tiled_memory(bo, priv->tiled, priv->untiled, TEGRA_READ_TILED_BUFFER); |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 323 | addr = priv->untiled; |
| 324 | } |
| 325 | |
| 326 | return addr; |
| 327 | } |
| 328 | |
Gurchetan Singh | ee43c30 | 2017-11-14 18:20:27 -0800 | [diff] [blame] | 329 | static int tegra_bo_unmap(struct bo *bo, struct vma *vma) |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 330 | { |
Gurchetan Singh | ee43c30 | 2017-11-14 18:20:27 -0800 | [diff] [blame] | 331 | if (vma->priv) { |
| 332 | struct tegra_private_map_data *priv = vma->priv; |
| 333 | vma->addr = priv->tiled; |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 334 | free(priv->untiled); |
| 335 | free(priv); |
Gurchetan Singh | ee43c30 | 2017-11-14 18:20:27 -0800 | [diff] [blame] | 336 | vma->priv = NULL; |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 337 | } |
| 338 | |
Gurchetan Singh | ee43c30 | 2017-11-14 18:20:27 -0800 | [diff] [blame] | 339 | return munmap(vma->addr, vma->length); |
Gurchetan Singh | ef92053 | 2016-08-12 16:38:25 -0700 | [diff] [blame] | 340 | } |
| 341 | |
Gurchetan Singh | 47e629b | 2017-11-02 14:07:18 -0700 | [diff] [blame] | 342 | static int tegra_bo_flush(struct bo *bo, struct mapping *mapping) |
Gurchetan Singh | 8e02e05 | 2017-09-14 14:18:43 -0700 | [diff] [blame] | 343 | { |
Gurchetan Singh | 47e629b | 2017-11-02 14:07:18 -0700 | [diff] [blame] | 344 | struct tegra_private_map_data *priv = mapping->vma->priv; |
Gurchetan Singh | 8e02e05 | 2017-09-14 14:18:43 -0700 | [diff] [blame] | 345 | |
Gurchetan Singh | 47e629b | 2017-11-02 14:07:18 -0700 | [diff] [blame] | 346 | if (priv && (mapping->vma->map_flags & BO_MAP_WRITE)) |
Gurchetan Singh | 2426d03 | 2017-09-28 15:12:01 -0700 | [diff] [blame] | 347 | transfer_tiled_memory(bo, priv->tiled, priv->untiled, TEGRA_WRITE_TILED_BUFFER); |
Gurchetan Singh | 8e02e05 | 2017-09-14 14:18:43 -0700 | [diff] [blame] | 348 | |
| 349 | return 0; |
| 350 | } |
| 351 | |
Gurchetan Singh | 3e9d383 | 2017-10-31 10:36:25 -0700 | [diff] [blame] | 352 | const struct backend backend_tegra = { |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 353 | .name = "tegra", |
Gurchetan Singh | 179687e | 2016-10-28 10:07:35 -0700 | [diff] [blame] | 354 | .init = tegra_init, |
Gurchetan Singh | d7c84fd | 2016-08-16 18:18:24 -0700 | [diff] [blame] | 355 | .bo_create = tegra_bo_create, |
Gurchetan Singh | 46faf6b | 2016-08-05 14:40:07 -0700 | [diff] [blame] | 356 | .bo_destroy = drv_gem_bo_destroy, |
Joe Kniss | 5f61c79 | 2017-06-28 14:06:24 -0700 | [diff] [blame] | 357 | .bo_import = tegra_bo_import, |
Gurchetan Singh | d7c84fd | 2016-08-16 18:18:24 -0700 | [diff] [blame] | 358 | .bo_map = tegra_bo_map, |
Gurchetan Singh | 44d1fe4 | 2016-12-14 08:51:28 -0800 | [diff] [blame] | 359 | .bo_unmap = tegra_bo_unmap, |
Gurchetan Singh | 8e02e05 | 2017-09-14 14:18:43 -0700 | [diff] [blame] | 360 | .bo_flush = tegra_bo_flush, |
Stéphane Marchesin | 25a2606 | 2014-09-12 16:18:59 -0700 | [diff] [blame] | 361 | }; |
| 362 | |
| 363 | #endif |