/*
 * Copyright 2014 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifdef DRV_I915

#include <assert.h>
#include <errno.h>
#include <i915_drm.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "helpers.h"
#include "util.h"

#define I915_CACHELINE_SIZE 64
#define I915_CACHELINE_MASK (I915_CACHELINE_SIZE - 1)

static const uint32_t scanout_render_formats[] = { DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR8888,
						   DRM_FORMAT_ARGB2101010, DRM_FORMAT_ARGB8888,
						   DRM_FORMAT_RGB565, DRM_FORMAT_XBGR2101010,
						   DRM_FORMAT_XBGR8888, DRM_FORMAT_XRGB2101010,
						   DRM_FORMAT_XRGB8888 };

static const uint32_t render_formats[] = { DRM_FORMAT_ABGR16161616F };

static const uint32_t texture_only_formats[] = { DRM_FORMAT_R8, DRM_FORMAT_NV12, DRM_FORMAT_P010,
						 DRM_FORMAT_YVU420, DRM_FORMAT_YVU420_ANDROID };

struct i915_device {
	uint32_t gen;
	int32_t has_llc;
};

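/*
 * Derive the GPU generation from the PCI device ID. The IDs listed below are
 * gen3 parts; every other device handled by this backend is treated as gen4+.
 */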
static uint32_t i915_get_gen(int device_id)
{
	const uint16_t gen3_ids[] = { 0x2582, 0x2592, 0x2772, 0x27A2, 0x27AE,
				      0x29C2, 0x29B2, 0x29D2, 0xA001, 0xA011 };
	unsigned i;
	for (i = 0; i < ARRAY_SIZE(gen3_ids); i++)
		if (gen3_ids[i] == device_id)
			return 3;

	return 4;
}

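/*
 * Return current_flags with the bits in mask cleared. For example,
 * render = unset_flags(render, linear_mask) drops the CPU-access/linear-only
 * use flags before the tiled combinations are registered below.
 */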
static uint64_t unset_flags(uint64_t current_flags, uint64_t mask)
{
	uint64_t value = current_flags & ~mask;
	return value;
}

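/*
 * Register the format/modifier/use-flag combinations supported by this
 * backend, in increasing priority order: linear, X-tiled, then Y-tiled.
 */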
static int i915_add_combinations(struct driver *drv)
{
	struct format_metadata metadata;
	uint64_t render, scanout_and_render, texture_only;

	scanout_and_render = BO_USE_RENDER_MASK | BO_USE_SCANOUT;
	render = BO_USE_RENDER_MASK;
	texture_only = BO_USE_TEXTURE_MASK;
	uint64_t linear_mask = BO_USE_RENDERSCRIPT | BO_USE_LINEAR | BO_USE_PROTECTED |
			       BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN;

	metadata.tiling = I915_TILING_NONE;
	metadata.priority = 1;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	drv_add_combinations(drv, scanout_render_formats, ARRAY_SIZE(scanout_render_formats),
			     &metadata, scanout_and_render);

	drv_add_combinations(drv, render_formats, ARRAY_SIZE(render_formats), &metadata, render);

	drv_add_combinations(drv, texture_only_formats, ARRAY_SIZE(texture_only_formats), &metadata,
			     texture_only);

	drv_modify_linear_combinations(drv);
	/*
	 * Chrome uses DMA-buf mmap to write to YV12 buffers, which are then accessed by the
	 * Video Encoder Accelerator (VEA). NV12 could potentially be supported as well in the
	 * future.
	 */
	drv_modify_combination(drv, DRM_FORMAT_YVU420, &metadata, BO_USE_HW_VIDEO_ENCODER);
	/* The IPU3 camera ISP supports only NV12 output. */
	drv_modify_combination(drv, DRM_FORMAT_NV12, &metadata,
			       BO_USE_HW_VIDEO_ENCODER | BO_USE_HW_VIDEO_DECODER |
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_SCANOUT);

	/* Android CTS tests require this. */
	drv_add_combination(drv, DRM_FORMAT_BGR888, &metadata, BO_USE_SW_MASK);

	/*
	 * The R8 format is used for Android's HAL_PIXEL_FORMAT_BLOB and for JPEG snapshots
	 * from the camera.
	 */
	drv_modify_combination(drv, DRM_FORMAT_R8, &metadata,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);

	render = unset_flags(render, linear_mask);
	scanout_and_render = unset_flags(scanout_and_render, linear_mask);

	metadata.tiling = I915_TILING_X;
	metadata.priority = 2;
	metadata.modifier = I915_FORMAT_MOD_X_TILED;

	drv_add_combinations(drv, render_formats, ARRAY_SIZE(render_formats), &metadata, render);
	drv_add_combinations(drv, scanout_render_formats, ARRAY_SIZE(scanout_render_formats),
			     &metadata, scanout_and_render);

	metadata.tiling = I915_TILING_Y;
	metadata.priority = 3;
	metadata.modifier = I915_FORMAT_MOD_Y_TILED;

	scanout_and_render = unset_flags(scanout_and_render,
					 BO_USE_SW_READ_RARELY | BO_USE_SW_WRITE_RARELY);
/* Support Y-tiled NV12 and P010 for libva. */
#ifdef I915_SCANOUT_Y_TILED
	drv_add_combination(drv, DRM_FORMAT_NV12, &metadata,
			    BO_USE_TEXTURE | BO_USE_HW_VIDEO_DECODER | BO_USE_SCANOUT);
#else
	drv_add_combination(drv, DRM_FORMAT_NV12, &metadata,
			    BO_USE_TEXTURE | BO_USE_HW_VIDEO_DECODER);
#endif
	scanout_and_render = unset_flags(scanout_and_render, BO_USE_SCANOUT);
	drv_add_combination(drv, DRM_FORMAT_P010, &metadata,
			    BO_USE_TEXTURE | BO_USE_HW_VIDEO_DECODER);

	drv_add_combinations(drv, render_formats, ARRAY_SIZE(render_formats), &metadata, render);
	drv_add_combinations(drv, scanout_render_formats, ARRAY_SIZE(scanout_render_formats),
			     &metadata, scanout_and_render);
	return 0;
}

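/*
 * Round the stride and height of a plane up to the alignment required by the
 * given tiling mode (and, on gen3, to a power-of-two stride).
 */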
static int i915_align_dimensions(struct bo *bo, uint32_t tiling, uint32_t *stride,
				 uint32_t *aligned_height)
{
	struct i915_device *i915 = bo->drv->priv;
	uint32_t horizontal_alignment;
	uint32_t vertical_alignment;

	switch (tiling) {
	default:
	case I915_TILING_NONE:
		/*
		 * The Intel GPU doesn't need any alignment in linear mode,
		 * but libva requires the allocation stride to be aligned to
		 * 16 bytes and the height to 4 rows. Further, we round the
		 * horizontal alignment up so that rows start on a cache line
		 * (64 bytes).
		 */
		horizontal_alignment = 64;
		vertical_alignment = 4;
		break;

	case I915_TILING_X:
		horizontal_alignment = 512;
		vertical_alignment = 8;
		break;

	case I915_TILING_Y:
		if (i915->gen == 3) {
			horizontal_alignment = 512;
			vertical_alignment = 8;
		} else {
			horizontal_alignment = 128;
			vertical_alignment = 32;
		}
		break;
	}

	*aligned_height = ALIGN(*aligned_height, vertical_alignment);
	if (i915->gen > 3) {
		*stride = ALIGN(*stride, horizontal_alignment);
	} else {
		while (*stride > horizontal_alignment)
			horizontal_alignment <<= 1;

		*stride = horizontal_alignment;
	}

	if (i915->gen <= 3 && *stride > 8192)
		return -EINVAL;

	return 0;
}

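/*
 * Flush the CPU cache lines covering [start, start + size). Used by
 * i915_bo_flush() on platforms without an LLC, where CPU-cached mappings are
 * not coherent with the GPU.
 */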
static void i915_clflush(void *start, size_t size)
{
	void *p = (void *)(((uintptr_t)start) & ~I915_CACHELINE_MASK);
	void *end = (void *)((uintptr_t)start + size);

	__builtin_ia32_mfence();
	while (p < end) {
		__builtin_ia32_clflush(p);
		p = (void *)((uintptr_t)p + I915_CACHELINE_SIZE);
	}
}

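/* Query the chipset ID and LLC presence, then advertise the supported combinations. */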
static int i915_init(struct driver *drv)
{
	int ret;
	int device_id;
	struct i915_device *i915;
	drm_i915_getparam_t get_param;

	i915 = calloc(1, sizeof(*i915));
	if (!i915)
		return -ENOMEM;

	memset(&get_param, 0, sizeof(get_param));
	get_param.param = I915_PARAM_CHIPSET_ID;
	get_param.value = &device_id;
	ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
	if (ret) {
		drv_log("Failed to get I915_PARAM_CHIPSET_ID\n");
		free(i915);
		return -EINVAL;
	}

	i915->gen = i915_get_gen(device_id);

	memset(&get_param, 0, sizeof(get_param));
	get_param.param = I915_PARAM_HAS_LLC;
	get_param.value = &i915->has_llc;
	ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
	if (ret) {
		drv_log("Failed to get I915_PARAM_HAS_LLC\n");
		free(i915);
		return -EINVAL;
	}

	drv->priv = i915;

	return i915_add_combinations(drv);
}

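/*
 * Compute per-plane strides, sizes and offsets for the given format, applying
 * the tiling alignment, and round the total size up to a page.
 */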
static int i915_bo_from_format(struct bo *bo, uint32_t width, uint32_t height, uint32_t format)
{
	uint32_t offset;
	size_t plane;
	int ret, pagesize;

	offset = 0;
	pagesize = getpagesize();
	for (plane = 0; plane < drv_num_planes_from_format(format); plane++) {
		uint32_t stride = drv_stride_from_format(format, width, plane);
		uint32_t plane_height = drv_height_from_format(format, height, plane);

		if (bo->meta.tiling != I915_TILING_NONE)
			assert(IS_ALIGNED(offset, pagesize));

		ret = i915_align_dimensions(bo, bo->meta.tiling, &stride, &plane_height);
		if (ret)
			return ret;

		bo->meta.strides[plane] = stride;
		bo->meta.sizes[plane] = stride * plane_height;
		bo->meta.offsets[plane] = offset;
		offset += bo->meta.sizes[plane];
	}

	bo->meta.total_size = ALIGN(offset, pagesize);

	return 0;
}

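/*
 * Allocate a GEM buffer for the requested format modifier: pick the matching
 * tiling mode, lay out the planes (including the CCS plane for compressed
 * Y-tiled surfaces), then issue GEM_CREATE and SET_TILING.
 */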
static int i915_bo_create_for_modifier(struct bo *bo, uint32_t width, uint32_t height,
				       uint32_t format, uint64_t modifier)
{
	int ret;
	size_t plane;
	struct drm_i915_gem_create gem_create;
	struct drm_i915_gem_set_tiling gem_set_tiling;

	switch (modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		bo->meta.tiling = I915_TILING_NONE;
		break;
	case I915_FORMAT_MOD_X_TILED:
		bo->meta.tiling = I915_TILING_X;
		break;
	case I915_FORMAT_MOD_Y_TILED:
	case I915_FORMAT_MOD_Y_TILED_CCS:
		bo->meta.tiling = I915_TILING_Y;
		break;
	}

	bo->meta.format_modifiers[0] = modifier;

	if (format == DRM_FORMAT_YVU420_ANDROID) {
		/*
		 * We only need to be able to use this as a linear texture,
		 * which doesn't put any HW restrictions on how we lay it
		 * out. The Android format does require the stride to be a
		 * multiple of 16 and expects the Cr and Cb stride to be
		 * ALIGN(Y_stride / 2, 16), which we can make happen by
		 * aligning to 32 bytes here.
		 */
		uint32_t stride = ALIGN(width, 32);
		drv_bo_from_format(bo, stride, height, format);
	} else if (modifier == I915_FORMAT_MOD_Y_TILED_CCS) {
		/*
		 * For compressed surfaces, we need a color control surface
		 * (CCS). Color compression is only supported for Y-tiled
		 * surfaces, and for each 32x16 block of tiles in the main
		 * surface we need a tile in the control surface. Y tiles are
		 * 128 bytes wide and 32 lines tall, and we use that to first
		 * compute the width and height in tiles of the main surface.
		 * The stride and height are already multiples of 128 and 32,
		 * respectively:
		 */
		uint32_t stride = drv_stride_from_format(format, width, 0);
		uint32_t width_in_tiles = DIV_ROUND_UP(stride, 128);
		uint32_t height_in_tiles = DIV_ROUND_UP(height, 32);
		uint32_t size = width_in_tiles * height_in_tiles * 4096;
		uint32_t offset = 0;

		bo->meta.strides[0] = width_in_tiles * 128;
		bo->meta.sizes[0] = size;
		bo->meta.offsets[0] = offset;
		offset += size;

		/*
		 * Now, compute the width and height in tiles of the control
		 * surface by dividing and rounding up.
		 */
		uint32_t ccs_width_in_tiles = DIV_ROUND_UP(width_in_tiles, 32);
		uint32_t ccs_height_in_tiles = DIV_ROUND_UP(height_in_tiles, 16);
		uint32_t ccs_size = ccs_width_in_tiles * ccs_height_in_tiles * 4096;

		/*
		 * With the stride and height aligned to Y tiles, the offset is
		 * already a multiple of 4096, which is the required alignment
		 * of the CCS.
		 */
		bo->meta.strides[1] = ccs_width_in_tiles * 128;
		bo->meta.sizes[1] = ccs_size;
		bo->meta.offsets[1] = offset;
		offset += ccs_size;

		bo->meta.num_planes = 2;
		bo->meta.total_size = offset;
	} else {
		ret = i915_bo_from_format(bo, width, height, format);
		if (ret)
			return ret;
	}

	memset(&gem_create, 0, sizeof(gem_create));
	gem_create.size = bo->meta.total_size;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_CREATE, &gem_create);
	if (ret) {
		drv_log("DRM_IOCTL_I915_GEM_CREATE failed (size=%llu)\n", gem_create.size);
		return -errno;
	}

	for (plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = gem_create.handle;

	memset(&gem_set_tiling, 0, sizeof(gem_set_tiling));
	gem_set_tiling.handle = bo->handles[0].u32;
	gem_set_tiling.tiling_mode = bo->meta.tiling;
	gem_set_tiling.stride = bo->meta.strides[0];

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_TILING, &gem_set_tiling);
	if (ret) {
		struct drm_gem_close gem_close;
		memset(&gem_close, 0, sizeof(gem_close));
		gem_close.handle = bo->handles[0].u32;
		drmIoctl(bo->drv->fd, DRM_IOCTL_GEM_CLOSE, &gem_close);

		drv_log("DRM_IOCTL_I915_GEM_SET_TILING failed with %d\n", errno);
		return -errno;
	}

	return 0;
}

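/*
 * Allocation entry points: pick a modifier from the registered combinations
 * (or from the caller-supplied list) and defer to i915_bo_create_for_modifier().
 */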
static int i915_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			  uint64_t use_flags)
{
	struct combination *combo;

	combo = drv_get_combination(bo->drv, format, use_flags);
	if (!combo)
		return -EINVAL;

	return i915_bo_create_for_modifier(bo, width, height, format, combo->metadata.modifier);
}

static int i915_bo_create_with_modifiers(struct bo *bo, uint32_t width, uint32_t height,
					 uint32_t format, const uint64_t *modifiers, uint32_t count)
{
	static const uint64_t modifier_order[] = {
		I915_FORMAT_MOD_Y_TILED_CCS,
		I915_FORMAT_MOD_Y_TILED,
		I915_FORMAT_MOD_X_TILED,
		DRM_FORMAT_MOD_LINEAR,
	};
	uint64_t modifier;

	modifier = drv_pick_modifier(modifiers, count, modifier_order, ARRAY_SIZE(modifier_order));

	return i915_bo_create_for_modifier(bo, width, height, format, modifier);
}

static void i915_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

static int i915_bo_import(struct bo *bo, struct drv_import_fd_data *data)
{
	int ret;
	struct drm_i915_gem_get_tiling gem_get_tiling;

	ret = drv_prime_bo_import(bo, data);
	if (ret)
		return ret;

	/* TODO(gsingh): export modifiers and get rid of backdoor tiling. */
	memset(&gem_get_tiling, 0, sizeof(gem_get_tiling));
	gem_get_tiling.handle = bo->handles[0].u32;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_GET_TILING, &gem_get_tiling);
	if (ret) {
		drv_gem_bo_destroy(bo);
		drv_log("DRM_IOCTL_I915_GEM_GET_TILING failed.\n");
		return ret;
	}

	bo->meta.tiling = gem_get_tiling.tiling_mode;
	return 0;
}

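/*
 * CPU-map the buffer: linear buffers use DRM_IOCTL_I915_GEM_MMAP (optionally
 * write-combined), tiled buffers go through the GTT aperture so accesses are
 * detiled by the hardware. Compressed (CCS) buffers cannot be mapped.
 */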
static void *i915_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	void *addr;

	if (bo->meta.format_modifiers[0] == I915_FORMAT_MOD_Y_TILED_CCS)
		return MAP_FAILED;

	if (bo->meta.tiling == I915_TILING_NONE) {
		struct drm_i915_gem_mmap gem_map;
		memset(&gem_map, 0, sizeof(gem_map));

		/* TODO(b/118799155): We don't seem to have a good way to
		 * detect the use cases for which WC mapping is really needed.
		 * The current heuristic seems overly coarse and may be slowing
		 * down some other use cases unnecessarily.
		 *
		 * For now, care must be taken not to use WC mappings for
		 * Renderscript and camera use cases, as they're
		 * performance-sensitive. */
		if ((bo->meta.use_flags & BO_USE_SCANOUT) &&
		    !(bo->meta.use_flags &
		      (BO_USE_RENDERSCRIPT | BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)))
			gem_map.flags = I915_MMAP_WC;

		gem_map.handle = bo->handles[0].u32;
		gem_map.offset = 0;
		gem_map.size = bo->meta.total_size;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP, &gem_map);
		if (ret) {
			drv_log("DRM_IOCTL_I915_GEM_MMAP failed\n");
			return MAP_FAILED;
		}

		addr = (void *)(uintptr_t)gem_map.addr_ptr;
	} else {
		struct drm_i915_gem_mmap_gtt gem_map;
		memset(&gem_map, 0, sizeof(gem_map));

		gem_map.handle = bo->handles[0].u32;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP_GTT, &gem_map);
		if (ret) {
			drv_log("DRM_IOCTL_I915_GEM_MMAP_GTT failed\n");
			return MAP_FAILED;
		}

		addr = mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED,
			    bo->drv->fd, gem_map.offset);
	}

	if (addr == MAP_FAILED) {
		drv_log("i915 GEM mmap failed\n");
		return addr;
	}

	vma->length = bo->meta.total_size;
	return addr;
}

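/*
 * Move the buffer to the CPU (linear) or GTT (tiled) domain before CPU access,
 * so pending GPU access completes and caches are synchronized.
 */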
static int i915_bo_invalidate(struct bo *bo, struct mapping *mapping)
{
	int ret;
	struct drm_i915_gem_set_domain set_domain;

	memset(&set_domain, 0, sizeof(set_domain));
	set_domain.handle = bo->handles[0].u32;
	if (bo->meta.tiling == I915_TILING_NONE) {
		set_domain.read_domains = I915_GEM_DOMAIN_CPU;
		if (mapping->vma->map_flags & BO_MAP_WRITE)
			set_domain.write_domain = I915_GEM_DOMAIN_CPU;
	} else {
		set_domain.read_domains = I915_GEM_DOMAIN_GTT;
		if (mapping->vma->map_flags & BO_MAP_WRITE)
			set_domain.write_domain = I915_GEM_DOMAIN_GTT;
	}

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_DOMAIN, &set_domain);
	if (ret) {
		drv_log("DRM_IOCTL_I915_GEM_SET_DOMAIN failed with %d\n", ret);
		return ret;
	}

	return 0;
}

static int i915_bo_flush(struct bo *bo, struct mapping *mapping)
{
	struct i915_device *i915 = bo->drv->priv;
	if (!i915->has_llc && bo->meta.tiling == I915_TILING_NONE)
		i915_clflush(mapping->vma->addr, mapping->vma->length);

	return 0;
}

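/*
 * Map Android's flexible formats onto the concrete DRM formats this backend
 * allocates, based on the requested use flags.
 */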
static uint32_t i915_resolve_format(struct driver *drv, uint32_t format, uint64_t use_flags)
{
	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* KBL camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
			return DRM_FORMAT_NV12;
		/* HACK: See b/28671744 */
		return DRM_FORMAT_XBGR8888;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		/*
		 * KBL camera subsystem requires NV12. Our other use cases
		 * don't care:
		 * - Hardware video supports NV12,
		 * - USB Camera HALv3 supports NV12,
		 * - USB Camera HALv1 doesn't use this format.
		 * Moreover, NV12 is preferred for video, due to overlay
		 * support on SKL+.
		 */
		return DRM_FORMAT_NV12;
	default:
		return format;
	}
}

const struct backend backend_i915 = {
	.name = "i915",
	.init = i915_init,
	.close = i915_close,
	.bo_create = i915_bo_create,
	.bo_create_with_modifiers = i915_bo_create_with_modifiers,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = i915_bo_import,
	.bo_map = i915_bo_map,
	.bo_unmap = drv_bo_munmap,
	.bo_invalidate = i915_bo_invalidate,
	.bo_flush = i915_bo_flush,
	.resolve_format = i915_resolve_format,
};

#endif