blob: 2fac455b8d9edf4170772727a4fa41543820d5f2 [file] [log] [blame]
/*
 * Copyright 2014 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

Gurchetan Singh46faf6b2016-08-05 14:40:07 -07007#ifdef DRV_TEGRA
Stéphane Marchesin25a26062014-09-12 16:18:59 -07008
#include <assert.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <tegra_drm.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "helpers.h"
#include "util.h"
Stéphane Marchesin25a26062014-09-12 16:18:59 -070019
/*
 * GOB (Group Of Bytes) is the basic unit of the blocklinear layout.
 * GOBs are arranged into blocks, where the height of the block (measured
 * in GOBs) is configurable.
 */
25#define NV_BLOCKLINEAR_GOB_HEIGHT 8
26#define NV_BLOCKLINEAR_GOB_WIDTH 64
27#define NV_DEFAULT_BLOCK_HEIGHT_LOG2 4
28#define NV_PREFERRED_PAGE_SIZE (128 * 1024)
29
// clang-format off
/*
 * Hardware memory "kind" selectors. PITCH (0) is plain linear; the non-zero
 * kinds are blocklinear layouts programmed via DRM_TEGRA_GEM_SET_TILING.
 */
enum nv_mem_kind
{
	NV_MEM_KIND_PITCH = 0,
	NV_MEM_KIND_C32_2CRA = 0xdb,
	NV_MEM_KIND_GENERIC_16Bx2 = 0xfe,
};
37
/* Direction of the tiled <-> linear staging copy done by transfer_tiled_memory(). */
enum tegra_map_type {
	TEGRA_READ_TILED_BUFFER = 0,  /* detile: copy tiled buffer into linear staging */
	TEGRA_WRITE_TILED_BUFFER = 1, /* retile: copy linear staging back into tiled buffer */
};
// clang-format on
Gurchetan Singh44d1fe42016-12-14 08:51:28 -080043
/* Per-mapping bookkeeping for blocklinear buffers mapped through tegra_bo_map(). */
struct tegra_private_map_data {
	void *tiled;   /* the real mmap'ed (tiled) buffer */
	void *untiled; /* heap-allocated linear staging copy handed to the client */
	int prot;      /* mmap protection flags; checked on flush for PROT_WRITE */
};
49
/* Formats advertised for render-target use. */
static const uint32_t render_target_formats[] = { DRM_FORMAT_ARGB8888, DRM_FORMAT_XRGB8888 };
Gurchetan Singh179687e2016-10-28 10:07:35 -070051
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080052static int compute_block_height_log2(int height)
53{
54 int block_height_log2 = NV_DEFAULT_BLOCK_HEIGHT_LOG2;
55
56 if (block_height_log2 > 0) {
57 /* Shrink, if a smaller block height could cover the whole
58 * surface height. */
59 int proposed = NV_BLOCKLINEAR_GOB_HEIGHT << (block_height_log2 - 1);
60 while (proposed >= height) {
61 block_height_log2--;
62 if (block_height_log2 == 0)
63 break;
64 proposed /= 2;
65 }
66 }
67 return block_height_log2;
68}
69
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080070static void compute_layout_blocklinear(int width, int height, int format, enum nv_mem_kind *kind,
71 uint32_t *block_height_log2, uint32_t *stride,
72 uint32_t *size)
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080073{
Gurchetan Singh83dc4fb2016-07-19 15:52:33 -070074 int pitch = drv_stride_from_format(format, width, 0);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080075
76 /* Align to blocklinear blocks. */
Yuly Novikov96c7a3b2015-12-08 22:48:29 -050077 pitch = ALIGN(pitch, NV_BLOCKLINEAR_GOB_WIDTH);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080078
79 /* Compute padded height. */
80 *block_height_log2 = compute_block_height_log2(height);
81 int block_height = 1 << *block_height_log2;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080082 int padded_height = ALIGN(height, NV_BLOCKLINEAR_GOB_HEIGHT * block_height);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080083
84 int bytes = pitch * padded_height;
85
86 /* Pad the allocation to the preferred page size.
87 * This will reduce the required page table size (see discussion in NV
88 * bug 1321091), and also acts as a WAR for NV bug 1325421.
89 */
Yuly Novikov96c7a3b2015-12-08 22:48:29 -050090 bytes = ALIGN(bytes, NV_PREFERRED_PAGE_SIZE);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080091
Vince Hsu0fd11422016-05-19 17:46:08 +080092 *kind = NV_MEM_KIND_C32_2CRA;
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080093 *stride = pitch;
94 *size = bytes;
95}
96
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080097static void compute_layout_linear(int width, int height, int format, uint32_t *stride,
98 uint32_t *size)
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080099{
Gurchetan Singh5972eec2016-12-16 15:51:46 -0800100 *stride = ALIGN(drv_stride_from_format(format, width, 0), 64);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800101 *size = *stride * height;
102}
103
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800104static void transfer_tile(struct bo *bo, uint8_t *tiled, uint8_t *untiled, enum tegra_map_type type,
105 uint32_t bytes_per_pixel, uint32_t gob_top, uint32_t gob_left,
Joe Kniss65705852017-06-29 15:02:46 -0700106 uint32_t gob_size_pixels, uint8_t *tiled_last)
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800107{
108 uint8_t *tmp;
109 uint32_t x, y, k;
110 for (k = 0; k < gob_size_pixels; k++) {
111 /*
112 * Given the kth pixel starting from the tile specified by
113 * gob_top and gob_left, unswizzle to get the standard (x, y)
114 * representation.
115 */
116 x = gob_left + (((k >> 3) & 8) | ((k >> 1) & 4) | (k & 3));
117 y = gob_top + ((k >> 7 << 3) | ((k >> 3) & 6) | ((k >> 2) & 1));
118
Joe Kniss65705852017-06-29 15:02:46 -0700119 if (tiled >= tiled_last)
120 return;
121
122 if (x >= bo->width || y >= bo->height) {
123 tiled += bytes_per_pixel;
124 continue;
125 }
126
127 tmp = untiled + y * bo->strides[0] + x * bytes_per_pixel;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800128
129 if (type == TEGRA_READ_TILED_BUFFER)
130 memcpy(tmp, tiled, bytes_per_pixel);
131 else if (type == TEGRA_WRITE_TILED_BUFFER)
132 memcpy(tiled, tmp, bytes_per_pixel);
133
134 /* Move on to next pixel. */
135 tiled += bytes_per_pixel;
136 }
137}
138
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800139static void transfer_tiled_memory(struct bo *bo, uint8_t *tiled, uint8_t *untiled,
140 enum tegra_map_type type)
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800141{
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800142 uint32_t gob_width, gob_height, gob_size_bytes, gob_size_pixels, gob_count_x, gob_count_y,
143 gob_top, gob_left;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800144 uint32_t i, j, offset;
Joe Kniss65705852017-06-29 15:02:46 -0700145 uint8_t *tmp, *tiled_last;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800146 uint32_t bytes_per_pixel = drv_stride_from_format(bo->format, 1, 0);
147
148 /*
149 * The blocklinear format consists of 8*(2^n) x 64 byte sized tiles,
150 * where 0 <= n <= 4.
151 */
152 gob_width = DIV_ROUND_UP(NV_BLOCKLINEAR_GOB_WIDTH, bytes_per_pixel);
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800153 gob_height = NV_BLOCKLINEAR_GOB_HEIGHT * (1 << NV_DEFAULT_BLOCK_HEIGHT_LOG2);
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800154 /* Calculate the height from maximum possible gob height */
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800155 while (gob_height > NV_BLOCKLINEAR_GOB_HEIGHT && gob_height >= 2 * bo->height)
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800156 gob_height /= 2;
157
158 gob_size_bytes = gob_height * NV_BLOCKLINEAR_GOB_WIDTH;
159 gob_size_pixels = gob_height * gob_width;
160
161 gob_count_x = DIV_ROUND_UP(bo->strides[0], NV_BLOCKLINEAR_GOB_WIDTH);
162 gob_count_y = DIV_ROUND_UP(bo->height, gob_height);
163
Joe Kniss65705852017-06-29 15:02:46 -0700164 tiled_last = tiled + bo->total_size;
165
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800166 offset = 0;
167 for (j = 0; j < gob_count_y; j++) {
168 gob_top = j * gob_height;
169 for (i = 0; i < gob_count_x; i++) {
170 tmp = tiled + offset;
171 gob_left = i * gob_width;
172
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800173 transfer_tile(bo, tmp, untiled, type, bytes_per_pixel, gob_top, gob_left,
Joe Kniss65705852017-06-29 15:02:46 -0700174 gob_size_pixels, tiled_last);
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800175
176 offset += gob_size_bytes;
177 }
178 }
179}
180
Gurchetan Singh179687e2016-10-28 10:07:35 -0700181static int tegra_init(struct driver *drv)
182{
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800183 int ret;
184 struct format_metadata metadata;
Gurchetan Singh8ac0c9a2017-05-15 09:34:22 -0700185 uint64_t flags = BO_USE_RENDER_MASK;
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800186
187 metadata.tiling = NV_MEM_KIND_PITCH;
188 metadata.priority = 1;
189 metadata.modifier = DRM_FORMAT_MOD_NONE;
190
Gurchetan Singh8ac0c9a2017-05-15 09:34:22 -0700191 ret = drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
192 &metadata, flags);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800193 if (ret)
194 return ret;
195
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800196 drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
197 drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800198
199 flags &= ~BO_USE_SW_WRITE_OFTEN;
200 flags &= ~BO_USE_SW_READ_OFTEN;
201 flags &= ~BO_USE_LINEAR;
202
203 metadata.tiling = NV_MEM_KIND_C32_2CRA;
204 metadata.priority = 2;
205
Gurchetan Singh8ac0c9a2017-05-15 09:34:22 -0700206 ret = drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
207 &metadata, flags);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800208 if (ret)
209 return ret;
210
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800211 drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_SCANOUT);
212 drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_SCANOUT);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800213 return 0;
Gurchetan Singh179687e2016-10-28 10:07:35 -0700214}
215
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800216static int tegra_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
217 uint32_t flags)
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800218{
219 uint32_t size, stride, block_height_log2 = 0;
220 enum nv_mem_kind kind = NV_MEM_KIND_PITCH;
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700221 struct drm_tegra_gem_create gem_create;
222 int ret;
223
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800224 if (flags & (BO_USE_CURSOR | BO_USE_LINEAR | BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN))
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800225 compute_layout_linear(width, height, format, &stride, &size);
226 else
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800227 compute_layout_blocklinear(width, height, format, &kind, &block_height_log2,
228 &stride, &size);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800229
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700230 memset(&gem_create, 0, sizeof(gem_create));
231 gem_create.size = size;
232 gem_create.flags = 0;
233
Gurchetan Singh46faf6b2016-08-05 14:40:07 -0700234 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_TEGRA_GEM_CREATE, &gem_create);
Ilja H. Friedelf9d2ab72015-04-09 14:08:36 -0700235 if (ret) {
Gurchetan Singhcb1471b2017-05-15 14:33:16 -0700236 fprintf(stderr, "drv: DRM_IOCTL_TEGRA_GEM_CREATE failed (size=%zu)\n", size);
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700237 return ret;
Ilja H. Friedelf9d2ab72015-04-09 14:08:36 -0700238 }
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700239
Yuly Novikov96c7a3b2015-12-08 22:48:29 -0500240 bo->handles[0].u32 = gem_create.handle;
241 bo->offsets[0] = 0;
Gurchetan Singha40ca9e2016-08-29 19:51:45 -0700242 bo->total_size = bo->sizes[0] = size;
Yuly Novikov96c7a3b2015-12-08 22:48:29 -0500243 bo->strides[0] = stride;
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800244
245 if (kind != NV_MEM_KIND_PITCH) {
246 struct drm_tegra_gem_set_tiling gem_tile;
247
248 memset(&gem_tile, 0, sizeof(gem_tile));
Yuly Novikov96c7a3b2015-12-08 22:48:29 -0500249 gem_tile.handle = bo->handles[0].u32;
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800250 gem_tile.mode = DRM_TEGRA_GEM_TILING_MODE_BLOCK;
251 gem_tile.value = block_height_log2;
252
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800253 ret = drmCommandWriteRead(bo->drv->fd, DRM_TEGRA_GEM_SET_TILING, &gem_tile,
254 sizeof(gem_tile));
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800255 if (ret < 0) {
Gurchetan Singh46faf6b2016-08-05 14:40:07 -0700256 drv_gem_bo_destroy(bo);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800257 return ret;
258 }
259
260 /* Encode blocklinear parameters for EGLImage creation. */
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800261 bo->tiling = (kind & 0xff) | ((block_height_log2 & 0xf) << 8);
Gurchetan Singhf3b22da2016-11-21 10:46:38 -0800262 bo->format_modifiers[0] = fourcc_mod_code(NV, bo->tiling);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800263 }
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700264
265 return 0;
266}
267
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700268static int tegra_bo_import(struct bo *bo, struct drv_import_fd_data *data)
269{
270 int ret;
271 struct drm_tegra_gem_get_tiling gem_get_tiling;
272
273 ret = drv_prime_bo_import(bo, data);
274 if (ret)
275 return ret;
276
277 /* TODO(gsingh): export modifiers and get rid of backdoor tiling. */
278 memset(&gem_get_tiling, 0, sizeof(gem_get_tiling));
279 gem_get_tiling.handle = bo->handles[0].u32;
280
281 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_TEGRA_GEM_GET_TILING, &gem_get_tiling);
282 if (ret) {
283 drv_gem_bo_destroy(bo);
284 return ret;
285 }
286
287 /* NOTE(djmk): we only know about one tiled format, so if our drmIoctl call tells us we are
288 tiled, assume it is this format (NV_MEM_KIND_C32_2CRA) otherwise linear (KIND_PITCH). */
289 if (gem_get_tiling.mode == DRM_TEGRA_GEM_TILING_MODE_PITCH) {
290 bo->tiling = NV_MEM_KIND_PITCH;
291 } else if (gem_get_tiling.mode == DRM_TEGRA_GEM_TILING_MODE_BLOCK) {
292 bo->tiling = NV_MEM_KIND_C32_2CRA;
293 } else {
294 fprintf(stderr, "tegra_bo_import: unknown tile format %d", gem_get_tiling.mode);
295 drv_gem_bo_destroy(bo);
296 assert(0);
297 }
298
299 bo->format_modifiers[0] = fourcc_mod_code(NV, bo->tiling);
300 return 0;
301}
302
Joe Kniss65705852017-06-29 15:02:46 -0700303static void *tegra_bo_map(struct bo *bo, struct map_info *data, size_t plane, int prot)
Gurchetan Singhef920532016-08-12 16:38:25 -0700304{
305 int ret;
306 struct drm_tegra_gem_mmap gem_map;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800307 struct tegra_private_map_data *priv;
Gurchetan Singhef920532016-08-12 16:38:25 -0700308
309 memset(&gem_map, 0, sizeof(gem_map));
310 gem_map.handle = bo->handles[0].u32;
311
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800312 ret = drmCommandWriteRead(bo->drv->fd, DRM_TEGRA_GEM_MMAP, &gem_map, sizeof(gem_map));
Gurchetan Singhef920532016-08-12 16:38:25 -0700313 if (ret < 0) {
314 fprintf(stderr, "drv: DRM_TEGRA_GEM_MMAP failed\n");
315 return MAP_FAILED;
316 }
317
Joe Kniss65705852017-06-29 15:02:46 -0700318 void *addr = mmap(0, bo->total_size, prot, MAP_SHARED, bo->drv->fd, gem_map.offset);
Gurchetan Singh1a31e602016-10-06 10:58:00 -0700319 data->length = bo->total_size;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800320 if ((bo->tiling & 0xFF) == NV_MEM_KIND_C32_2CRA && addr != MAP_FAILED) {
321 priv = calloc(1, sizeof(*priv));
322 priv->untiled = calloc(1, bo->total_size);
323 priv->tiled = addr;
Joe Kniss65705852017-06-29 15:02:46 -0700324 priv->prot = prot;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800325 data->priv = priv;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800326 transfer_tiled_memory(bo, priv->tiled, priv->untiled, TEGRA_READ_TILED_BUFFER);
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800327 addr = priv->untiled;
328 }
329
330 return addr;
331}
332
333static int tegra_bo_unmap(struct bo *bo, struct map_info *data)
334{
335 if (data->priv) {
336 struct tegra_private_map_data *priv = data->priv;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800337 data->addr = priv->tiled;
338 free(priv->untiled);
339 free(priv);
340 data->priv = NULL;
341 }
342
343 return munmap(data->addr, data->length);
Gurchetan Singhef920532016-08-12 16:38:25 -0700344}
345
Gurchetan Singh8e02e052017-09-14 14:18:43 -0700346static int tegra_bo_flush(struct bo *bo, struct map_info *data)
347{
348 struct tegra_private_map_data *priv = data->priv;
349
350 if (priv && priv->prot & PROT_WRITE)
351 transfer_tiled_memory(bo, priv->tiled, priv->untiled,
352 TEGRA_WRITE_TILED_BUFFER);
353
354 return 0;
355}
356
/* Driver entry points registered with the minigbm core. */
struct backend backend_tegra = {
	.name = "tegra",
	.init = tegra_init,
	.bo_create = tegra_bo_create,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = tegra_bo_import,
	.bo_map = tegra_bo_map,
	.bo_unmap = tegra_bo_unmap,
	.bo_flush = tegra_bo_flush,
};
367
368#endif