blob: df97461c3796ee7beff8e578f71636f791b49004 [file] [log] [blame]
Stéphane Marchesin25a26062014-09-12 16:18:59 -07001/*
Daniele Castagna7a755de2016-12-16 17:32:30 -05002 * Copyright 2014 The Chromium OS Authors. All rights reserved.
Stéphane Marchesin25a26062014-09-12 16:18:59 -07003 * Use of this source code is governed by a BSD-style license that can be
4 * found in the LICENSE file.
5 */
6
Gurchetan Singh46faf6b2016-08-05 14:40:07 -07007#ifdef DRV_TEGRA
Stéphane Marchesin25a26062014-09-12 16:18:59 -07008
Joe Kniss5f61c792017-06-28 14:06:24 -07009#include <assert.h>
Stéphane Marchesin6ac299f2019-03-21 12:23:29 -070010#include <errno.h>
Ilja H. Friedelf9d2ab72015-04-09 14:08:36 -070011#include <stdio.h>
Stéphane Marchesin25a26062014-09-12 16:18:59 -070012#include <string.h>
Gurchetan Singhef920532016-08-12 16:38:25 -070013#include <sys/mman.h>
Stéphane Marchesin25a26062014-09-12 16:18:59 -070014#include <tegra_drm.h>
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080015#include <xf86drm.h>
Stéphane Marchesin25a26062014-09-12 16:18:59 -070016
Gurchetan Singh46faf6b2016-08-05 14:40:07 -070017#include "drv_priv.h"
Stéphane Marchesin25a26062014-09-12 16:18:59 -070018#include "helpers.h"
Yuly Novikov96c7a3b2015-12-08 22:48:29 -050019#include "util.h"
Stéphane Marchesin25a26062014-09-12 16:18:59 -070020
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080021/*
22 * GOB (Group Of Bytes) is the basic unit of the blocklinear layout.
23 * GOBs are arranged to blocks, where the height of the block (measured
24 * in GOBs) is configurable.
25 */
26#define NV_BLOCKLINEAR_GOB_HEIGHT 8
27#define NV_BLOCKLINEAR_GOB_WIDTH 64
28#define NV_DEFAULT_BLOCK_HEIGHT_LOG2 4
29#define NV_PREFERRED_PAGE_SIZE (128 * 1024)
30
// clang-format off
/*
 * Tegra memory "kind" values; they select the internal memory layout used
 * for a buffer (pitch-linear vs. blocklinear).
 */
enum nv_mem_kind
{
	NV_MEM_KIND_PITCH = 0,
	NV_MEM_KIND_C32_2CRA = 0xdb,
	NV_MEM_KIND_GENERIC_16Bx2 = 0xfe,
};

/* Direction of a CPU tile/detile copy between the two mappings. */
enum tegra_map_type {
	TEGRA_READ_TILED_BUFFER = 0,  /* tiled -> untiled (on map) */
	TEGRA_WRITE_TILED_BUFFER = 1, /* untiled -> tiled (on flush) */
};
// clang-format on

/* Per-mapping state: the raw tiled mapping plus the linear (untiled)
 * shadow copy handed to the CPU. */
struct tegra_private_map_data {
	void *tiled;
	void *untiled;
};

/* Formats advertised by this backend. */
static const uint32_t render_target_formats[] = { DRM_FORMAT_ARGB8888, DRM_FORMAT_XRGB8888 };
Gurchetan Singh179687e2016-10-28 10:07:35 -070051
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080052static int compute_block_height_log2(int height)
53{
54 int block_height_log2 = NV_DEFAULT_BLOCK_HEIGHT_LOG2;
55
56 if (block_height_log2 > 0) {
57 /* Shrink, if a smaller block height could cover the whole
58 * surface height. */
59 int proposed = NV_BLOCKLINEAR_GOB_HEIGHT << (block_height_log2 - 1);
60 while (proposed >= height) {
61 block_height_log2--;
62 if (block_height_log2 == 0)
63 break;
64 proposed /= 2;
65 }
66 }
67 return block_height_log2;
68}
69
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080070static void compute_layout_blocklinear(int width, int height, int format, enum nv_mem_kind *kind,
71 uint32_t *block_height_log2, uint32_t *stride,
72 uint32_t *size)
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080073{
Gurchetan Singh83dc4fb2016-07-19 15:52:33 -070074 int pitch = drv_stride_from_format(format, width, 0);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080075
76 /* Align to blocklinear blocks. */
Yuly Novikov96c7a3b2015-12-08 22:48:29 -050077 pitch = ALIGN(pitch, NV_BLOCKLINEAR_GOB_WIDTH);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080078
79 /* Compute padded height. */
80 *block_height_log2 = compute_block_height_log2(height);
81 int block_height = 1 << *block_height_log2;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080082 int padded_height = ALIGN(height, NV_BLOCKLINEAR_GOB_HEIGHT * block_height);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080083
84 int bytes = pitch * padded_height;
85
86 /* Pad the allocation to the preferred page size.
87 * This will reduce the required page table size (see discussion in NV
88 * bug 1321091), and also acts as a WAR for NV bug 1325421.
89 */
Yuly Novikov96c7a3b2015-12-08 22:48:29 -050090 bytes = ALIGN(bytes, NV_PREFERRED_PAGE_SIZE);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080091
Vince Hsu0fd11422016-05-19 17:46:08 +080092 *kind = NV_MEM_KIND_C32_2CRA;
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080093 *stride = pitch;
94 *size = bytes;
95}
96
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080097static void compute_layout_linear(int width, int height, int format, uint32_t *stride,
98 uint32_t *size)
Lauri Peltonen7842d8f2014-12-17 23:01:37 -080099{
Gurchetan Singh5972eec2016-12-16 15:51:46 -0800100 *stride = ALIGN(drv_stride_from_format(format, width, 0), 64);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800101 *size = *stride * height;
102}
103
/*
 * Copy one GOB's worth of pixels between the tiled buffer and the linear
 * (untiled) buffer, in the direction given by |type|.  |tiled| points at
 * the start of this GOB inside the tiled mapping; |gob_top|/|gob_left|
 * give the GOB's position in pixel coordinates; |tiled_last| is one past
 * the end of the tiled mapping and bounds all accesses.
 */
static void transfer_tile(struct bo *bo, uint8_t *tiled, uint8_t *untiled, enum tegra_map_type type,
			  uint32_t bytes_per_pixel, uint32_t gob_top, uint32_t gob_left,
			  uint32_t gob_size_pixels, uint8_t *tiled_last)
{
	uint8_t *tmp;
	uint32_t x, y, k;
	for (k = 0; k < gob_size_pixels; k++) {
		/*
		 * Given the kth pixel starting from the tile specified by
		 * gob_top and gob_left, unswizzle to get the standard (x, y)
		 * representation.
		 */
		x = gob_left + (((k >> 3) & 8) | ((k >> 1) & 4) | (k & 3));
		y = gob_top + ((k >> 7 << 3) | ((k >> 3) & 6) | ((k >> 2) & 1));

		/* Never touch bytes past the end of the tiled mapping. */
		if (tiled >= tiled_last)
			return;

		/* Padding pixels exist only in the tiled buffer: skip the
		 * copy but still advance the tiled pointer. */
		if (x >= bo->meta.width || y >= bo->meta.height) {
			tiled += bytes_per_pixel;
			continue;
		}

		tmp = untiled + y * bo->meta.strides[0] + x * bytes_per_pixel;

		if (type == TEGRA_READ_TILED_BUFFER)
			memcpy(tmp, tiled, bytes_per_pixel);
		else if (type == TEGRA_WRITE_TILED_BUFFER)
			memcpy(tiled, tmp, bytes_per_pixel);

		/* Move on to next pixel. */
		tiled += bytes_per_pixel;
	}
}
138
/*
 * Walk every GOB of the buffer and copy it between the tiled and untiled
 * buffers, in the direction given by |type|.
 */
static void transfer_tiled_memory(struct bo *bo, uint8_t *tiled, uint8_t *untiled,
				  enum tegra_map_type type)
{
	uint32_t gob_width, gob_height, gob_size_bytes, gob_size_pixels, gob_count_x, gob_count_y,
	    gob_top, gob_left;
	uint32_t i, j, offset;
	uint8_t *tmp, *tiled_last;
	/* Bytes per pixel, derived from the stride of a one-pixel-wide row. */
	uint32_t bytes_per_pixel = drv_stride_from_format(bo->meta.format, 1, 0);

	/*
	 * The blocklinear format consists of 8*(2^n) x 64 byte sized tiles,
	 * where 0 <= n <= 4.
	 */
	gob_width = DIV_ROUND_UP(NV_BLOCKLINEAR_GOB_WIDTH, bytes_per_pixel);
	gob_height = NV_BLOCKLINEAR_GOB_HEIGHT * (1 << NV_DEFAULT_BLOCK_HEIGHT_LOG2);
	/* Calculate the height from maximum possible gob height */
	while (gob_height > NV_BLOCKLINEAR_GOB_HEIGHT && gob_height >= 2 * bo->meta.height)
		gob_height /= 2;

	gob_size_bytes = gob_height * NV_BLOCKLINEAR_GOB_WIDTH;
	gob_size_pixels = gob_height * gob_width;

	gob_count_x = DIV_ROUND_UP(bo->meta.strides[0], NV_BLOCKLINEAR_GOB_WIDTH);
	gob_count_y = DIV_ROUND_UP(bo->meta.height, gob_height);

	/* One past the end of the tiled mapping; transfer_tile() stops here. */
	tiled_last = tiled + bo->meta.total_size;

	offset = 0;
	for (j = 0; j < gob_count_y; j++) {
		gob_top = j * gob_height;
		for (i = 0; i < gob_count_x; i++) {
			/* GOBs are laid out consecutively in the tiled buffer. */
			tmp = tiled + offset;
			gob_left = i * gob_width;

			transfer_tile(bo, tmp, untiled, type, bytes_per_pixel, gob_top, gob_left,
				      gob_size_pixels, tiled_last);

			offset += gob_size_bytes;
		}
	}
}
180
Gurchetan Singh179687e2016-10-28 10:07:35 -0700181static int tegra_init(struct driver *drv)
182{
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800183 struct format_metadata metadata;
Gurchetan Singha1892b22017-09-28 16:40:52 -0700184 uint64_t use_flags = BO_USE_RENDER_MASK;
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800185
186 metadata.tiling = NV_MEM_KIND_PITCH;
187 metadata.priority = 1;
Kristian H. Kristensenbc8c5932017-10-24 18:36:32 -0700188 metadata.modifier = DRM_FORMAT_MOD_LINEAR;
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800189
Gurchetan Singhd3001452017-11-03 17:18:36 -0700190 drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
191 &metadata, use_flags);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800192
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800193 drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
194 drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800195
Gurchetan Singha1892b22017-09-28 16:40:52 -0700196 use_flags &= ~BO_USE_SW_WRITE_OFTEN;
197 use_flags &= ~BO_USE_SW_READ_OFTEN;
198 use_flags &= ~BO_USE_LINEAR;
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800199
200 metadata.tiling = NV_MEM_KIND_C32_2CRA;
201 metadata.priority = 2;
202
Gurchetan Singhd3001452017-11-03 17:18:36 -0700203 drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
204 &metadata, use_flags);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800205
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800206 drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_SCANOUT);
207 drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_SCANOUT);
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800208 return 0;
Gurchetan Singh179687e2016-10-28 10:07:35 -0700209}
210
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800211static int tegra_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
Gurchetan Singha1892b22017-09-28 16:40:52 -0700212 uint64_t use_flags)
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800213{
214 uint32_t size, stride, block_height_log2 = 0;
215 enum nv_mem_kind kind = NV_MEM_KIND_PITCH;
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700216 struct drm_tegra_gem_create gem_create;
217 int ret;
218
Gurchetan Singha1892b22017-09-28 16:40:52 -0700219 if (use_flags &
220 (BO_USE_CURSOR | BO_USE_LINEAR | BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN))
Gurchetan Singh6b41fb52017-03-01 20:14:39 -0800221 compute_layout_linear(width, height, format, &stride, &size);
222 else
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800223 compute_layout_blocklinear(width, height, format, &kind, &block_height_log2,
224 &stride, &size);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800225
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700226 memset(&gem_create, 0, sizeof(gem_create));
227 gem_create.size = size;
228 gem_create.flags = 0;
229
Gurchetan Singh46faf6b2016-08-05 14:40:07 -0700230 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_TEGRA_GEM_CREATE, &gem_create);
Ilja H. Friedelf9d2ab72015-04-09 14:08:36 -0700231 if (ret) {
Alistair Strachan0cfaaa52018-03-19 14:03:23 -0700232 drv_log("DRM_IOCTL_TEGRA_GEM_CREATE failed (size=%zu)\n", size);
Stéphane Marchesin6ac299f2019-03-21 12:23:29 -0700233 return -errno;
Ilja H. Friedelf9d2ab72015-04-09 14:08:36 -0700234 }
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700235
Yuly Novikov96c7a3b2015-12-08 22:48:29 -0500236 bo->handles[0].u32 = gem_create.handle;
Gurchetan Singh298b7572019-09-19 09:55:18 -0700237 bo->meta.offsets[0] = 0;
238 bo->meta.total_size = bo->meta.sizes[0] = size;
239 bo->meta.strides[0] = stride;
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800240
241 if (kind != NV_MEM_KIND_PITCH) {
242 struct drm_tegra_gem_set_tiling gem_tile;
243
244 memset(&gem_tile, 0, sizeof(gem_tile));
Yuly Novikov96c7a3b2015-12-08 22:48:29 -0500245 gem_tile.handle = bo->handles[0].u32;
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800246 gem_tile.mode = DRM_TEGRA_GEM_TILING_MODE_BLOCK;
247 gem_tile.value = block_height_log2;
248
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800249 ret = drmCommandWriteRead(bo->drv->fd, DRM_TEGRA_GEM_SET_TILING, &gem_tile,
250 sizeof(gem_tile));
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800251 if (ret < 0) {
Gurchetan Singh46faf6b2016-08-05 14:40:07 -0700252 drv_gem_bo_destroy(bo);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800253 return ret;
254 }
255
256 /* Encode blocklinear parameters for EGLImage creation. */
Gurchetan Singh298b7572019-09-19 09:55:18 -0700257 bo->meta.tiling = (kind & 0xff) | ((block_height_log2 & 0xf) << 8);
258 bo->meta.format_modifiers[0] = fourcc_mod_code(NV, bo->meta.tiling);
Lauri Peltonen7842d8f2014-12-17 23:01:37 -0800259 }
Stéphane Marchesin25a26062014-09-12 16:18:59 -0700260
261 return 0;
262}
263
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700264static int tegra_bo_import(struct bo *bo, struct drv_import_fd_data *data)
265{
266 int ret;
267 struct drm_tegra_gem_get_tiling gem_get_tiling;
268
269 ret = drv_prime_bo_import(bo, data);
270 if (ret)
271 return ret;
272
273 /* TODO(gsingh): export modifiers and get rid of backdoor tiling. */
274 memset(&gem_get_tiling, 0, sizeof(gem_get_tiling));
275 gem_get_tiling.handle = bo->handles[0].u32;
276
277 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_TEGRA_GEM_GET_TILING, &gem_get_tiling);
278 if (ret) {
279 drv_gem_bo_destroy(bo);
Stéphane Marchesin6ac299f2019-03-21 12:23:29 -0700280 return -errno;
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700281 }
282
283 /* NOTE(djmk): we only know about one tiled format, so if our drmIoctl call tells us we are
284 tiled, assume it is this format (NV_MEM_KIND_C32_2CRA) otherwise linear (KIND_PITCH). */
285 if (gem_get_tiling.mode == DRM_TEGRA_GEM_TILING_MODE_PITCH) {
Gurchetan Singh298b7572019-09-19 09:55:18 -0700286 bo->meta.tiling = NV_MEM_KIND_PITCH;
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700287 } else if (gem_get_tiling.mode == DRM_TEGRA_GEM_TILING_MODE_BLOCK) {
Gurchetan Singh298b7572019-09-19 09:55:18 -0700288 bo->meta.tiling = NV_MEM_KIND_C32_2CRA;
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700289 } else {
Alistair Strachan0cfaaa52018-03-19 14:03:23 -0700290 drv_log("%s: unknown tile format %d\n", __func__, gem_get_tiling.mode);
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700291 drv_gem_bo_destroy(bo);
292 assert(0);
293 }
294
Gurchetan Singh298b7572019-09-19 09:55:18 -0700295 bo->meta.format_modifiers[0] = fourcc_mod_code(NV, bo->meta.tiling);
Gurchetan Singh7dcdff12017-09-14 13:04:11 -0700296 return 0;
297}
298
Gurchetan Singhee43c302017-11-14 18:20:27 -0800299static void *tegra_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
Gurchetan Singhef920532016-08-12 16:38:25 -0700300{
301 int ret;
302 struct drm_tegra_gem_mmap gem_map;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800303 struct tegra_private_map_data *priv;
Gurchetan Singhef920532016-08-12 16:38:25 -0700304
305 memset(&gem_map, 0, sizeof(gem_map));
306 gem_map.handle = bo->handles[0].u32;
307
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800308 ret = drmCommandWriteRead(bo->drv->fd, DRM_TEGRA_GEM_MMAP, &gem_map, sizeof(gem_map));
Gurchetan Singhef920532016-08-12 16:38:25 -0700309 if (ret < 0) {
Alistair Strachan0cfaaa52018-03-19 14:03:23 -0700310 drv_log("DRM_TEGRA_GEM_MMAP failed\n");
Gurchetan Singhef920532016-08-12 16:38:25 -0700311 return MAP_FAILED;
312 }
313
Gurchetan Singh298b7572019-09-19 09:55:18 -0700314 void *addr = mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
Gurchetan Singhcfb88762017-09-28 17:14:50 -0700315 gem_map.offset);
Gurchetan Singh298b7572019-09-19 09:55:18 -0700316 vma->length = bo->meta.total_size;
317 if ((bo->meta.tiling & 0xFF) == NV_MEM_KIND_C32_2CRA && addr != MAP_FAILED) {
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800318 priv = calloc(1, sizeof(*priv));
Gurchetan Singh298b7572019-09-19 09:55:18 -0700319 priv->untiled = calloc(1, bo->meta.total_size);
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800320 priv->tiled = addr;
Gurchetan Singhee43c302017-11-14 18:20:27 -0800321 vma->priv = priv;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800322 transfer_tiled_memory(bo, priv->tiled, priv->untiled, TEGRA_READ_TILED_BUFFER);
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800323 addr = priv->untiled;
324 }
325
326 return addr;
327}
328
Gurchetan Singhee43c302017-11-14 18:20:27 -0800329static int tegra_bo_unmap(struct bo *bo, struct vma *vma)
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800330{
Gurchetan Singhee43c302017-11-14 18:20:27 -0800331 if (vma->priv) {
332 struct tegra_private_map_data *priv = vma->priv;
333 vma->addr = priv->tiled;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800334 free(priv->untiled);
335 free(priv);
Gurchetan Singhee43c302017-11-14 18:20:27 -0800336 vma->priv = NULL;
Gurchetan Singh44d1fe42016-12-14 08:51:28 -0800337 }
338
Gurchetan Singhee43c302017-11-14 18:20:27 -0800339 return munmap(vma->addr, vma->length);
Gurchetan Singhef920532016-08-12 16:38:25 -0700340}
341
Gurchetan Singh47e629b2017-11-02 14:07:18 -0700342static int tegra_bo_flush(struct bo *bo, struct mapping *mapping)
Gurchetan Singh8e02e052017-09-14 14:18:43 -0700343{
Gurchetan Singh47e629b2017-11-02 14:07:18 -0700344 struct tegra_private_map_data *priv = mapping->vma->priv;
Gurchetan Singh8e02e052017-09-14 14:18:43 -0700345
Gurchetan Singh47e629b2017-11-02 14:07:18 -0700346 if (priv && (mapping->vma->map_flags & BO_MAP_WRITE))
Gurchetan Singh2426d032017-09-28 15:12:01 -0700347 transfer_tiled_memory(bo, priv->tiled, priv->untiled, TEGRA_WRITE_TILED_BUFFER);
Gurchetan Singh8e02e052017-09-14 14:18:43 -0700348
349 return 0;
350}
351
/* minigbm backend entry points for the Tegra DRM driver. */
const struct backend backend_tegra = {
	.name = "tegra",
	.init = tegra_init,
	.bo_create = tegra_bo_create,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = tegra_bo_import,
	.bo_map = tegra_bo_map,
	.bo_unmap = tegra_bo_unmap,
	.bo_flush = tegra_bo_flush,
};
362
363#endif