/*
 * Copyright 2014 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifdef DRV_I915

#include <assert.h>
#include <errno.h>
#include <i915_drm.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "helpers.h"
#include "util.h"

#define I915_CACHELINE_SIZE 64
#define I915_CACHELINE_MASK (I915_CACHELINE_SIZE - 1)

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR16161616F, DRM_FORMAT_ABGR2101010,
						   DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB1555,
						   DRM_FORMAT_ARGB2101010, DRM_FORMAT_ARGB8888,
						   DRM_FORMAT_RGB565, DRM_FORMAT_XBGR2101010,
						   DRM_FORMAT_XBGR8888, DRM_FORMAT_XRGB1555,
						   DRM_FORMAT_XRGB2101010, DRM_FORMAT_XRGB8888 };

static const uint32_t tileable_texture_source_formats[] = { DRM_FORMAT_GR88, DRM_FORMAT_R8,
							     DRM_FORMAT_UYVY, DRM_FORMAT_YUYV };

static const uint32_t texture_source_formats[] = { DRM_FORMAT_YVU420, DRM_FORMAT_YVU420_ANDROID,
						    DRM_FORMAT_NV12, DRM_FORMAT_P010 };

struct i915_device {
	uint32_t gen;
	int32_t has_llc;
};

static uint32_t i915_get_gen(int device_id)
{
	const uint16_t gen3_ids[] = { 0x2582, 0x2592, 0x2772, 0x27A2, 0x27AE,
				      0x29C2, 0x29B2, 0x29D2, 0xA001, 0xA011 };
	unsigned i;
	for (i = 0; i < ARRAY_SIZE(gen3_ids); i++)
		if (gen3_ids[i] == device_id)
			return 3;

	return 4;
}

/*
 * We allow allocation of ARGB formats for SCANOUT if the corresponding XRGB
 * format supports it. It's up to the caller (chrome ozone) to ultimately not
 * scan out ARGB if the display controller only supports XRGB, but we'll allow
 * the allocation of the bo here.
 */
static bool format_compatible(const struct combination *combo, uint32_t format)
{
	if (combo->format == format)
		return true;

	switch (format) {
	case DRM_FORMAT_XRGB8888:
		return combo->format == DRM_FORMAT_ARGB8888;
	case DRM_FORMAT_XBGR8888:
		return combo->format == DRM_FORMAT_ABGR8888;
	case DRM_FORMAT_RGBX8888:
		return combo->format == DRM_FORMAT_RGBA8888;
	case DRM_FORMAT_BGRX8888:
		return combo->format == DRM_FORMAT_BGRA8888;
	default:
		return false;
	}
}

static int i915_add_kms_item(struct driver *drv, const struct kms_item *item)
{
	uint32_t i;
	struct combination *combo;

	/*
	 * Older hardware can't scan out Y-tiled formats. Newer devices can, and
	 * report this functionality via format modifiers.
	 */
	for (i = 0; i < drv_array_size(drv->combos); i++) {
		combo = (struct combination *)drv_array_at_idx(drv->combos, i);
		if (!format_compatible(combo, item->format))
			continue;

		if (item->modifier == DRM_FORMAT_MOD_LINEAR &&
		    combo->metadata.tiling == I915_TILING_X) {
			/*
			 * FIXME: drv_query_kms() does not report the available modifiers
			 * yet, but we know that all hardware can scan out from X-tiled
			 * buffers, so let's add this to our combinations, except for
			 * cursor, which must not be tiled.
			 */
			combo->use_flags |= item->use_flags & ~BO_USE_CURSOR;
		}

		/* If we can scan out NV12, we support all tiling modes. */
		if (item->format == DRM_FORMAT_NV12)
			combo->use_flags |= item->use_flags;

		if (combo->metadata.modifier == item->modifier)
			combo->use_flags |= item->use_flags;
	}

	return 0;
}

static int i915_add_combinations(struct driver *drv)
{
	int ret;
	uint32_t i;
	struct drv_array *kms_items;
	struct format_metadata metadata;
	uint64_t render_use_flags, texture_use_flags;

	render_use_flags = BO_USE_RENDER_MASK;
	texture_use_flags = BO_USE_TEXTURE_MASK;

	metadata.tiling = I915_TILING_NONE;
	metadata.priority = 1;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, render_use_flags);

	drv_add_combinations(drv, texture_source_formats, ARRAY_SIZE(texture_source_formats),
			     &metadata, texture_use_flags);

	drv_add_combinations(drv, tileable_texture_source_formats,
			     ARRAY_SIZE(tileable_texture_source_formats), &metadata,
			     texture_use_flags);

	/*
	 * Chrome uses DMA-buf mmap to write to YV12 buffers, which are then accessed by the
	 * Video Encoder Accelerator (VEA). It could potentially support NV12 as well in the
	 * future.
	 */
	drv_modify_combination(drv, DRM_FORMAT_YVU420, &metadata, BO_USE_HW_VIDEO_ENCODER);
	drv_modify_combination(drv, DRM_FORMAT_NV12, &metadata,
			       BO_USE_HW_VIDEO_ENCODER | BO_USE_HW_VIDEO_DECODER);

	/* Android CTS tests require this. */
	drv_add_combination(drv, DRM_FORMAT_BGR888, &metadata, BO_USE_SW_MASK);

	drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);

	/* IPU3 camera ISP supports only NV12 output. */
	drv_modify_combination(drv, DRM_FORMAT_NV12, &metadata,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
	/*
	 * R8 format is used for Android's HAL_PIXEL_FORMAT_BLOB and is used for JPEG snapshots
	 * from camera.
	 */
	drv_modify_combination(drv, DRM_FORMAT_R8, &metadata,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);

	render_use_flags &= ~BO_USE_RENDERSCRIPT;
	render_use_flags &= ~BO_USE_SW_WRITE_OFTEN;
	render_use_flags &= ~BO_USE_SW_READ_OFTEN;
	render_use_flags &= ~BO_USE_LINEAR;
	render_use_flags &= ~BO_USE_PROTECTED;

	texture_use_flags &= ~BO_USE_RENDERSCRIPT;
	texture_use_flags &= ~BO_USE_SW_WRITE_OFTEN;
	texture_use_flags &= ~BO_USE_SW_READ_OFTEN;
	texture_use_flags &= ~BO_USE_LINEAR;
	texture_use_flags &= ~BO_USE_PROTECTED;

	metadata.tiling = I915_TILING_X;
	metadata.priority = 2;
	metadata.modifier = I915_FORMAT_MOD_X_TILED;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, render_use_flags);

	drv_add_combinations(drv, tileable_texture_source_formats,
			     ARRAY_SIZE(tileable_texture_source_formats), &metadata,
			     texture_use_flags);

	metadata.tiling = I915_TILING_Y;
	metadata.priority = 3;
	metadata.modifier = I915_FORMAT_MOD_Y_TILED;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, render_use_flags);

	drv_add_combinations(drv, tileable_texture_source_formats,
			     ARRAY_SIZE(tileable_texture_source_formats), &metadata,
			     texture_use_flags);

	/* Support Y-tiled NV12 and P010 for libva. */
	drv_add_combination(drv, DRM_FORMAT_NV12, &metadata,
			    BO_USE_TEXTURE | BO_USE_HW_VIDEO_DECODER);
	drv_add_combination(drv, DRM_FORMAT_P010, &metadata,
			    BO_USE_TEXTURE | BO_USE_HW_VIDEO_DECODER);

	kms_items = drv_query_kms(drv);
	if (!kms_items)
		return 0;

	for (i = 0; i < drv_array_size(kms_items); i++) {
		ret = i915_add_kms_item(drv, (struct kms_item *)drv_array_at_idx(kms_items, i));
		if (ret) {
			drv_array_destroy(kms_items);
			return ret;
		}
	}

	drv_array_destroy(kms_items);
	return 0;
}

static int i915_align_dimensions(struct bo *bo, uint32_t tiling, uint32_t *stride,
				 uint32_t *aligned_height)
{
	struct i915_device *i915 = bo->drv->priv;
	uint32_t horizontal_alignment;
	uint32_t vertical_alignment;

	switch (tiling) {
	default:
	case I915_TILING_NONE:
		/*
		 * The Intel GPU doesn't need any alignment in linear mode,
		 * but libva requires the allocation stride to be aligned to
		 * 16 bytes and height to 4 rows. Further, we round up the
		 * horizontal alignment so that rows start on a cache line (64
		 * bytes).
		 */
		horizontal_alignment = 64;
		vertical_alignment = 4;
		break;

	case I915_TILING_X:
		horizontal_alignment = 512;
		vertical_alignment = 8;
		break;

	case I915_TILING_Y:
		if (i915->gen == 3) {
			horizontal_alignment = 512;
			vertical_alignment = 8;
		} else {
			horizontal_alignment = 128;
			vertical_alignment = 32;
		}
		break;
	}

	*aligned_height = ALIGN(*aligned_height, vertical_alignment);
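	/*
	 * Gen4+ simply rounds the stride up to the horizontal alignment. On
	 * gen2/3 the stride is instead rounded up to a power-of-two multiple
	 * of the base alignment (and rejected above 8192 bytes below),
	 * presumably to satisfy the stricter pitch requirements of older
	 * hardware.
	 */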
	if (i915->gen > 3) {
		*stride = ALIGN(*stride, horizontal_alignment);
	} else {
		while (*stride > horizontal_alignment)
			horizontal_alignment <<= 1;

		*stride = horizontal_alignment;
	}

	if (i915->gen <= 3 && *stride > 8192)
		return -EINVAL;

	return 0;
}

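/*
 * Flush the CPU cache for the range [start, start + size): round the start
 * address down to a cacheline boundary, issue a memory fence, then clflush
 * one cacheline at a time. Used on non-LLC platforms, where CPU writes
 * through a cached linear mapping are not otherwise visible to the GPU (see
 * i915_bo_flush()).
 */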
static void i915_clflush(void *start, size_t size)
{
	void *p = (void *)(((uintptr_t)start) & ~I915_CACHELINE_MASK);
	void *end = (void *)((uintptr_t)start + size);

	__builtin_ia32_mfence();
	while (p < end) {
		__builtin_ia32_clflush(p);
		p = (void *)((uintptr_t)p + I915_CACHELINE_SIZE);
	}
}

static int i915_init(struct driver *drv)
{
	int ret;
	int device_id;
	struct i915_device *i915;
	drm_i915_getparam_t get_param;

	i915 = calloc(1, sizeof(*i915));
	if (!i915)
		return -ENOMEM;

	memset(&get_param, 0, sizeof(get_param));
	get_param.param = I915_PARAM_CHIPSET_ID;
	get_param.value = &device_id;
	ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
	if (ret) {
		drv_log("Failed to get I915_PARAM_CHIPSET_ID\n");
		free(i915);
		return -EINVAL;
	}

	i915->gen = i915_get_gen(device_id);

	memset(&get_param, 0, sizeof(get_param));
	get_param.param = I915_PARAM_HAS_LLC;
	get_param.value = &i915->has_llc;
	ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
	if (ret) {
		drv_log("Failed to get I915_PARAM_HAS_LLC\n");
		free(i915);
		return -EINVAL;
	}

	drv->priv = i915;

	return i915_add_combinations(drv);
}

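/*
 * Compute the plane layout for a buffer: each plane's stride and height are
 * aligned for the chosen tiling mode, plane offsets are accumulated in order,
 * and the total size is rounded up to a whole page.
 */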
static int i915_bo_from_format(struct bo *bo, uint32_t width, uint32_t height, uint32_t format)
{
	uint32_t offset;
	size_t plane;
	int ret, pagesize;

	offset = 0;
	pagesize = getpagesize();
	for (plane = 0; plane < drv_num_planes_from_format(format); plane++) {
		uint32_t stride = drv_stride_from_format(format, width, plane);
		uint32_t plane_height = drv_height_from_format(format, height, plane);

		if (bo->meta.tiling != I915_TILING_NONE)
			assert(IS_ALIGNED(offset, pagesize));

		ret = i915_align_dimensions(bo, bo->meta.tiling, &stride, &plane_height);
		if (ret)
			return ret;

		bo->meta.strides[plane] = stride;
		bo->meta.sizes[plane] = stride * plane_height;
		bo->meta.offsets[plane] = offset;
		offset += bo->meta.sizes[plane];
	}

	bo->meta.total_size = ALIGN(offset, pagesize);

	return 0;
}

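/*
 * Allocation path shared by both create entry points: translate the format
 * modifier into a legacy tiling mode, compute the plane layout, create a
 * single GEM buffer backing all planes, then tell the kernel the tiling mode
 * and stride of plane 0.
 */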
static int i915_bo_create_for_modifier(struct bo *bo, uint32_t width, uint32_t height,
				       uint32_t format, uint64_t modifier)
{
	int ret;
	size_t plane;
	struct drm_i915_gem_create gem_create;
	struct drm_i915_gem_set_tiling gem_set_tiling;

	switch (modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		bo->meta.tiling = I915_TILING_NONE;
		break;
	case I915_FORMAT_MOD_X_TILED:
		bo->meta.tiling = I915_TILING_X;
		break;
	case I915_FORMAT_MOD_Y_TILED:
		bo->meta.tiling = I915_TILING_Y;
		break;
	}

	bo->meta.format_modifiers[0] = modifier;

	if (format == DRM_FORMAT_YVU420_ANDROID) {
		/*
		 * We only need to be able to use this as a linear texture,
		 * which doesn't put any HW restrictions on how we lay it
		 * out. The Android format does require the stride to be a
		 * multiple of 16 and expects the Cr and Cb stride to be
		 * ALIGN(Y_stride / 2, 16), which we can make happen by
		 * aligning to 32 bytes here.
		 */
		uint32_t stride = ALIGN(width, 32);
		drv_bo_from_format(bo, stride, height, format);
	} else {
		i915_bo_from_format(bo, width, height, format);
	}

	memset(&gem_create, 0, sizeof(gem_create));
	gem_create.size = bo->meta.total_size;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_CREATE, &gem_create);
	if (ret) {
		drv_log("DRM_IOCTL_I915_GEM_CREATE failed (size=%llu)\n", gem_create.size);
		return -errno;
	}

	for (plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = gem_create.handle;

	memset(&gem_set_tiling, 0, sizeof(gem_set_tiling));
	gem_set_tiling.handle = bo->handles[0].u32;
	gem_set_tiling.tiling_mode = bo->meta.tiling;
	gem_set_tiling.stride = bo->meta.strides[0];

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_TILING, &gem_set_tiling);
	if (ret) {
		struct drm_gem_close gem_close;
		memset(&gem_close, 0, sizeof(gem_close));
		gem_close.handle = bo->handles[0].u32;
		drmIoctl(bo->drv->fd, DRM_IOCTL_GEM_CLOSE, &gem_close);

		drv_log("DRM_IOCTL_I915_GEM_SET_TILING failed with %d\n", errno);
		return -errno;
	}

	return 0;
}

static int i915_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			  uint64_t use_flags)
{
	struct combination *combo;

	combo = drv_get_combination(bo->drv, format, use_flags);
	if (!combo)
		return -EINVAL;

	return i915_bo_create_for_modifier(bo, width, height, format, combo->metadata.modifier);
}

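/*
 * When the caller provides an explicit modifier list, pick the most capable
 * modifier we support in preference order: Y-tiled, then X-tiled, then linear.
 */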
static int i915_bo_create_with_modifiers(struct bo *bo, uint32_t width, uint32_t height,
					 uint32_t format, const uint64_t *modifiers, uint32_t count)
{
	static const uint64_t modifier_order[] = {
		I915_FORMAT_MOD_Y_TILED,
		I915_FORMAT_MOD_X_TILED,
		DRM_FORMAT_MOD_LINEAR,
	};
	uint64_t modifier;

	modifier = drv_pick_modifier(modifiers, count, modifier_order, ARRAY_SIZE(modifier_order));

	return i915_bo_create_for_modifier(bo, width, height, format, modifier);
}

static void i915_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

static int i915_bo_import(struct bo *bo, struct drv_import_fd_data *data)
{
	int ret;
	struct drm_i915_gem_get_tiling gem_get_tiling;

	ret = drv_prime_bo_import(bo, data);
	if (ret)
		return ret;

	/* TODO(gsingh): export modifiers and get rid of backdoor tiling. */
	memset(&gem_get_tiling, 0, sizeof(gem_get_tiling));
	gem_get_tiling.handle = bo->handles[0].u32;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_GET_TILING, &gem_get_tiling);
	if (ret) {
		drv_gem_bo_destroy(bo);
		drv_log("DRM_IOCTL_I915_GEM_GET_TILING failed.\n");
		return ret;
	}

	bo->meta.tiling = gem_get_tiling.tiling_mode;
	return 0;
}

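/*
 * CPU mapping: linear buffers are mapped with DRM_IOCTL_I915_GEM_MMAP
 * (write-combined for scanout buffers, cached otherwise), while tiled buffers
 * are mapped through the GTT aperture so that CPU accesses see a linear view
 * of the tiled layout.
 */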
static void *i915_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	void *addr;

	if (bo->meta.tiling == I915_TILING_NONE) {
		struct drm_i915_gem_mmap gem_map;
		memset(&gem_map, 0, sizeof(gem_map));

		/* TODO(b/118799155): We don't seem to have a good way to
		 * detect the use cases for which WC mapping is really needed.
		 * The current heuristic seems overly coarse and may be slowing
		 * down some other use cases unnecessarily.
		 *
		 * For now, care must be taken not to use WC mappings for
		 * Renderscript and camera use cases, as they're
		 * performance-sensitive. */
		if ((bo->meta.use_flags & BO_USE_SCANOUT) &&
		    !(bo->meta.use_flags &
		      (BO_USE_RENDERSCRIPT | BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)))
			gem_map.flags = I915_MMAP_WC;

		gem_map.handle = bo->handles[0].u32;
		gem_map.offset = 0;
		gem_map.size = bo->meta.total_size;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP, &gem_map);
		if (ret) {
			drv_log("DRM_IOCTL_I915_GEM_MMAP failed\n");
			return MAP_FAILED;
		}

		addr = (void *)(uintptr_t)gem_map.addr_ptr;
	} else {
		struct drm_i915_gem_mmap_gtt gem_map;
		memset(&gem_map, 0, sizeof(gem_map));

		gem_map.handle = bo->handles[0].u32;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP_GTT, &gem_map);
		if (ret) {
			drv_log("DRM_IOCTL_I915_GEM_MMAP_GTT failed\n");
			return MAP_FAILED;
		}

		addr = mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED,
			    bo->drv->fd, gem_map.offset);
	}

	if (addr == MAP_FAILED) {
		drv_log("i915 GEM mmap failed\n");
		return addr;
	}

	vma->length = bo->meta.total_size;
	return addr;
}

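/*
 * Before CPU access, move the buffer into the CPU domain (linear buffers) or
 * the GTT domain (tiled buffers mapped through the aperture) so the kernel
 * can wait for outstanding GPU work and flush caches as needed.
 */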
static int i915_bo_invalidate(struct bo *bo, struct mapping *mapping)
{
	int ret;
	struct drm_i915_gem_set_domain set_domain;

	memset(&set_domain, 0, sizeof(set_domain));
	set_domain.handle = bo->handles[0].u32;
	if (bo->meta.tiling == I915_TILING_NONE) {
		set_domain.read_domains = I915_GEM_DOMAIN_CPU;
		if (mapping->vma->map_flags & BO_MAP_WRITE)
			set_domain.write_domain = I915_GEM_DOMAIN_CPU;
	} else {
		set_domain.read_domains = I915_GEM_DOMAIN_GTT;
		if (mapping->vma->map_flags & BO_MAP_WRITE)
			set_domain.write_domain = I915_GEM_DOMAIN_GTT;
	}

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_DOMAIN, &set_domain);
	if (ret) {
		drv_log("DRM_IOCTL_I915_GEM_SET_DOMAIN failed with %d\n", ret);
		return ret;
	}

	return 0;
}

static int i915_bo_flush(struct bo *bo, struct mapping *mapping)
{
	struct i915_device *i915 = bo->drv->priv;
	if (!i915->has_llc && bo->meta.tiling == I915_TILING_NONE)
		i915_clflush(mapping->vma->addr, mapping->vma->length);

	return 0;
}

static uint32_t i915_resolve_format(struct driver *drv, uint32_t format, uint64_t use_flags)
{
	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* KBL camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
			return DRM_FORMAT_NV12;
		/* HACK: See b/28671744 */
		return DRM_FORMAT_XBGR8888;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		/*
		 * KBL camera subsystem requires NV12. Our other use cases
		 * don't care:
		 * - Hardware video supports NV12,
		 * - USB Camera HALv3 supports NV12,
		 * - USB Camera HALv1 doesn't use this format.
		 * Moreover, NV12 is preferred for video, due to overlay
		 * support on SKL+.
		 */
		return DRM_FORMAT_NV12;
	default:
		return format;
	}
}

const struct backend backend_i915 = {
	.name = "i915",
	.init = i915_init,
	.close = i915_close,
	.bo_create = i915_bo_create,
	.bo_create_with_modifiers = i915_bo_create_with_modifiers,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = i915_bo_import,
	.bo_map = i915_bo_map,
	.bo_unmap = drv_bo_munmap,
	.bo_invalidate = i915_bo_invalidate,
	.bo_flush = i915_bo_flush,
	.resolve_format = i915_resolve_format,
};

#endif