/*
 * Copyright 2014 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifdef DRV_I915

#include <assert.h>
#include <errno.h>
#include <i915_drm.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "helpers.h"
#include "util.h"

#define I915_CACHELINE_SIZE 64
#define I915_CACHELINE_MASK (I915_CACHELINE_SIZE - 1)

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888,	    DRM_FORMAT_ARGB1555,
						   DRM_FORMAT_ARGB8888,	    DRM_FORMAT_RGB565,
						   DRM_FORMAT_XBGR2101010,  DRM_FORMAT_XBGR8888,
						   DRM_FORMAT_XBGR16161616, DRM_FORMAT_XRGB1555,
						   DRM_FORMAT_XRGB2101010,  DRM_FORMAT_XRGB8888 };

static const uint32_t tileable_texture_source_formats[] = { DRM_FORMAT_GR88, DRM_FORMAT_R8,
							     DRM_FORMAT_UYVY, DRM_FORMAT_YUYV };

static const uint32_t texture_source_formats[] = { DRM_FORMAT_YVU420, DRM_FORMAT_YVU420_ANDROID,
						    DRM_FORMAT_NV12 };

struct i915_device {
	uint32_t gen;
	int32_t has_llc;
};

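/*
 * Returns the GPU generation for a PCI device ID. Only the gen3 IDs are
 * listed explicitly; every other ID is treated as gen4 or newer, which is
 * all the alignment code below needs to distinguish.
 */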
static uint32_t i915_get_gen(int device_id)
{
	const uint16_t gen3_ids[] = { 0x2582, 0x2592, 0x2772, 0x27A2, 0x27AE,
				      0x29C2, 0x29B2, 0x29D2, 0xA001, 0xA011 };
	unsigned i;
	for (i = 0; i < ARRAY_SIZE(gen3_ids); i++)
		if (gen3_ids[i] == device_id)
			return 3;

	return 4;
}

/*
 * We allow allocation of ARGB formats for SCANOUT if the corresponding XRGB
 * format supports it. It's up to the caller (chrome ozone) to ultimately not
 * scan out ARGB if the display controller only supports XRGB, but we'll allow
 * the allocation of the bo here.
 */
static bool format_compatible(const struct combination *combo, uint32_t format)
{
	if (combo->format == format)
		return true;

	switch (format) {
	case DRM_FORMAT_XRGB8888:
		return combo->format == DRM_FORMAT_ARGB8888;
	case DRM_FORMAT_XBGR8888:
		return combo->format == DRM_FORMAT_ABGR8888;
	case DRM_FORMAT_RGBX8888:
		return combo->format == DRM_FORMAT_RGBA8888;
	case DRM_FORMAT_BGRX8888:
		return combo->format == DRM_FORMAT_BGRA8888;
	default:
		return false;
	}
}

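/*
 * Merges the use flags of a KMS plane/format item into every existing
 * combination whose format (or its X/A-channel sibling) matches it.
 */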
static int i915_add_kms_item(struct driver *drv, const struct kms_item *item)
{
	uint32_t i;
	struct combination *combo;

	/*
	 * Older hardware can't scan out Y-tiled formats. Newer devices can,
	 * and report this functionality via format modifiers.
	 */
	for (i = 0; i < drv_array_size(drv->combos); i++) {
		combo = (struct combination *)drv_array_at_idx(drv->combos, i);
		if (!format_compatible(combo, item->format))
			continue;

		if (item->modifier == DRM_FORMAT_MOD_LINEAR &&
		    combo->metadata.tiling == I915_TILING_X) {
			/*
			 * FIXME: drv_query_kms() does not report the available modifiers
			 * yet, but we know that all hardware can scan out from X-tiled
			 * buffers, so let's add this to our combinations, except for
			 * cursor, which must not be tiled.
			 */
			combo->use_flags |= item->use_flags & ~BO_USE_CURSOR;
		}

		if (combo->metadata.modifier == item->modifier)
			combo->use_flags |= item->use_flags;
	}

	return 0;
}

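/*
 * Registers the linear, X-tiled and Y-tiled format combinations supported by
 * this backend, then folds in the scanout capabilities reported by KMS.
 */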
static int i915_add_combinations(struct driver *drv)
{
	int ret;
	uint32_t i;
	struct drv_array *kms_items;
	struct format_metadata metadata;
	uint64_t render_use_flags, texture_use_flags;

	render_use_flags = BO_USE_RENDER_MASK;
	texture_use_flags = BO_USE_TEXTURE_MASK;

	metadata.tiling = I915_TILING_NONE;
	metadata.priority = 1;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, render_use_flags);

	drv_add_combinations(drv, texture_source_formats, ARRAY_SIZE(texture_source_formats),
			     &metadata, texture_use_flags);

	drv_add_combinations(drv, tileable_texture_source_formats,
			     ARRAY_SIZE(tileable_texture_source_formats), &metadata,
			     texture_use_flags);

	drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);

	/* IPU3 camera ISP supports only NV12 output. */
	drv_modify_combination(drv, DRM_FORMAT_NV12, &metadata,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
	/*
	 * The R8 format is used for Android's HAL_PIXEL_FORMAT_BLOB and for
	 * JPEG snapshots from the camera.
	 */
	drv_modify_combination(drv, DRM_FORMAT_R8, &metadata,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);

	render_use_flags &= ~BO_USE_RENDERSCRIPT;
	render_use_flags &= ~BO_USE_SW_WRITE_OFTEN;
	render_use_flags &= ~BO_USE_SW_READ_OFTEN;
	render_use_flags &= ~BO_USE_LINEAR;

	texture_use_flags &= ~BO_USE_RENDERSCRIPT;
	texture_use_flags &= ~BO_USE_SW_WRITE_OFTEN;
	texture_use_flags &= ~BO_USE_SW_READ_OFTEN;
	texture_use_flags &= ~BO_USE_LINEAR;

	metadata.tiling = I915_TILING_X;
	metadata.priority = 2;
	metadata.modifier = I915_FORMAT_MOD_X_TILED;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, render_use_flags);

	drv_add_combinations(drv, tileable_texture_source_formats,
			     ARRAY_SIZE(tileable_texture_source_formats), &metadata,
			     texture_use_flags);

	metadata.tiling = I915_TILING_Y;
	metadata.priority = 3;
	metadata.modifier = I915_FORMAT_MOD_Y_TILED;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, render_use_flags);

	drv_add_combinations(drv, tileable_texture_source_formats,
			     ARRAY_SIZE(tileable_texture_source_formats), &metadata,
			     texture_use_flags);

	kms_items = drv_query_kms(drv);
	if (!kms_items)
		return 0;

	for (i = 0; i < drv_array_size(kms_items); i++) {
		ret = i915_add_kms_item(drv, (struct kms_item *)drv_array_at_idx(kms_items, i));
		if (ret) {
			drv_array_destroy(kms_items);
			return ret;
		}
	}

	drv_array_destroy(kms_items);
	return 0;
}

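/*
 * Rounds the stride and height up to the requirements of the chosen tiling
 * mode. On gen3 and older the stride is additionally rounded up to a power
 * of two and rejected if it exceeds 8192 bytes.
 */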
static int i915_align_dimensions(struct bo *bo, uint32_t tiling, uint32_t *stride,
				 uint32_t *aligned_height)
{
	struct i915_device *i915 = bo->drv->priv;
	uint32_t horizontal_alignment;
	uint32_t vertical_alignment;

	switch (tiling) {
	default:
	case I915_TILING_NONE:
		/*
		 * The Intel GPU doesn't need any alignment in linear mode,
		 * but libva requires the allocation stride to be aligned to
		 * 16 bytes and the height to 4 rows. Further, we round up the
		 * horizontal alignment so that rows start on a cache line (64
		 * bytes).
		 */
		horizontal_alignment = 64;
		vertical_alignment = 4;
		break;

	case I915_TILING_X:
		horizontal_alignment = 512;
		vertical_alignment = 8;
		break;

	case I915_TILING_Y:
		if (i915->gen == 3) {
			horizontal_alignment = 512;
			vertical_alignment = 8;
		} else {
			horizontal_alignment = 128;
			vertical_alignment = 32;
		}
		break;
	}

	*aligned_height = ALIGN(bo->height, vertical_alignment);
	if (i915->gen > 3) {
		*stride = ALIGN(*stride, horizontal_alignment);
	} else {
		while (*stride > horizontal_alignment)
			horizontal_alignment <<= 1;

		*stride = horizontal_alignment;
	}

	if (i915->gen <= 3 && *stride > 8192)
		return -EINVAL;

	return 0;
}

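/*
 * Flushes the CPU cache lines covering [start, start + size) so that data
 * written through a CPU mapping becomes visible to the GPU on platforms
 * without a shared last-level cache.
 */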
static void i915_clflush(void *start, size_t size)
{
	void *p = (void *)(((uintptr_t)start) & ~I915_CACHELINE_MASK);
	void *end = (void *)((uintptr_t)start + size);

	__builtin_ia32_mfence();
	while (p < end) {
		__builtin_ia32_clflush(p);
		p = (void *)((uintptr_t)p + I915_CACHELINE_SIZE);
	}
}

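/*
 * Queries the chipset ID and LLC support from the kernel, stashes them in the
 * per-driver private data and registers the supported combinations.
 */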
static int i915_init(struct driver *drv)
{
	int ret;
	int device_id;
	struct i915_device *i915;
	drm_i915_getparam_t get_param;

	i915 = calloc(1, sizeof(*i915));
	if (!i915)
		return -ENOMEM;

	memset(&get_param, 0, sizeof(get_param));
	get_param.param = I915_PARAM_CHIPSET_ID;
	get_param.value = &device_id;
	ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
	if (ret) {
		drv_log("Failed to get I915_PARAM_CHIPSET_ID\n");
		free(i915);
		return -EINVAL;
	}

	i915->gen = i915_get_gen(device_id);

	memset(&get_param, 0, sizeof(get_param));
	get_param.param = I915_PARAM_HAS_LLC;
	get_param.value = &i915->has_llc;
	ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
	if (ret) {
		drv_log("Failed to get I915_PARAM_HAS_LLC\n");
		free(i915);
		return -EINVAL;
	}

	drv->priv = i915;

	return i915_add_combinations(drv);
}

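/*
 * Computes the per-plane stride, size and offset for the given format using
 * the tiling-specific alignment rules, and records the total allocation size.
 */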
static int i915_bo_from_format(struct bo *bo, uint32_t width, uint32_t height, uint32_t format)
{
	uint32_t offset;
	size_t plane;
	int ret;

	offset = 0;
	for (plane = 0; plane < drv_num_planes_from_format(format); plane++) {
		uint32_t stride = drv_stride_from_format(format, width, plane);
		uint32_t plane_height = drv_height_from_format(format, height, plane);

		if (bo->tiling != I915_TILING_NONE)
			assert(IS_ALIGNED(offset, 4096));

		ret = i915_align_dimensions(bo, bo->tiling, &stride, &plane_height);
		if (ret)
			return ret;

		bo->strides[plane] = stride;
		bo->sizes[plane] = stride * plane_height;
		bo->offsets[plane] = offset;
		offset += bo->sizes[plane];
	}

	bo->total_size = offset;

	return 0;
}

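/*
 * Allocates a GEM buffer laid out for the requested format modifier and sets
 * the matching legacy tiling mode via DRM_IOCTL_I915_GEM_SET_TILING.
 */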
static int i915_bo_create_for_modifier(struct bo *bo, uint32_t width, uint32_t height,
				       uint32_t format, uint64_t modifier)
{
	int ret;
	size_t plane;
	struct drm_i915_gem_create gem_create;
	struct drm_i915_gem_set_tiling gem_set_tiling;

	switch (modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		bo->tiling = I915_TILING_NONE;
		break;
	case I915_FORMAT_MOD_X_TILED:
		bo->tiling = I915_TILING_X;
		break;
	case I915_FORMAT_MOD_Y_TILED:
		bo->tiling = I915_TILING_Y;
		break;
	}

	bo->format_modifiers[0] = modifier;

	if (format == DRM_FORMAT_YVU420_ANDROID) {
		/*
		 * We only need to be able to use this as a linear texture,
		 * which doesn't put any HW restrictions on how we lay it
		 * out. The Android format does require the stride to be a
		 * multiple of 16 and expects the Cr and Cb stride to be
		 * ALIGN(Y_stride / 2, 16), which we can make happen by
		 * aligning to 32 bytes here.
		 */
		uint32_t stride = ALIGN(width, 32);
		drv_bo_from_format(bo, stride, height, format);
	} else {
		i915_bo_from_format(bo, width, height, format);
	}

	memset(&gem_create, 0, sizeof(gem_create));
	gem_create.size = bo->total_size;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_CREATE, &gem_create);
	if (ret) {
		drv_log("DRM_IOCTL_I915_GEM_CREATE failed (size=%llu)\n", gem_create.size);
		return ret;
	}

	for (plane = 0; plane < bo->num_planes; plane++)
		bo->handles[plane].u32 = gem_create.handle;

	memset(&gem_set_tiling, 0, sizeof(gem_set_tiling));
	gem_set_tiling.handle = bo->handles[0].u32;
	gem_set_tiling.tiling_mode = bo->tiling;
	gem_set_tiling.stride = bo->strides[0];

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_TILING, &gem_set_tiling);
	if (ret) {
		struct drm_gem_close gem_close;
		memset(&gem_close, 0, sizeof(gem_close));
		gem_close.handle = bo->handles[0].u32;
		drmIoctl(bo->drv->fd, DRM_IOCTL_GEM_CLOSE, &gem_close);

		drv_log("DRM_IOCTL_I915_GEM_SET_TILING failed with %d\n", errno);
		return -errno;
	}

	return 0;
}

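/*
 * Legacy allocation path: looks up a combination matching the format and use
 * flags and allocates with that combination's modifier.
 */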
static int i915_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			  uint64_t use_flags)
{
	struct combination *combo;

	combo = drv_get_combination(bo->drv, format, use_flags);
	if (!combo)
		return -EINVAL;

	return i915_bo_create_for_modifier(bo, width, height, format, combo->metadata.modifier);
}

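/*
 * Modifier-aware allocation path: picks the most preferred modifier
 * (Y-tiled, then X-tiled, then linear) from the list offered by the caller.
 */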
static int i915_bo_create_with_modifiers(struct bo *bo, uint32_t width, uint32_t height,
					 uint32_t format, const uint64_t *modifiers, uint32_t count)
{
	static const uint64_t modifier_order[] = {
		I915_FORMAT_MOD_Y_TILED,
		I915_FORMAT_MOD_X_TILED,
		DRM_FORMAT_MOD_LINEAR,
	};
	uint64_t modifier;

	modifier = drv_pick_modifier(modifiers, count, modifier_order, ARRAY_SIZE(modifier_order));

	return i915_bo_create_for_modifier(bo, width, height, format, modifier);
}

static void i915_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

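/*
 * Imports a buffer from a prime fd and queries the kernel for its tiling
 * mode, since modifiers are not passed through the import path yet.
 */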
static int i915_bo_import(struct bo *bo, struct drv_import_fd_data *data)
{
	int ret;
	struct drm_i915_gem_get_tiling gem_get_tiling;

	ret = drv_prime_bo_import(bo, data);
	if (ret)
		return ret;

	/* TODO(gsingh): export modifiers and get rid of backdoor tiling. */
	memset(&gem_get_tiling, 0, sizeof(gem_get_tiling));
	gem_get_tiling.handle = bo->handles[0].u32;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_GET_TILING, &gem_get_tiling);
	if (ret) {
		drv_gem_bo_destroy(bo);
		drv_log("DRM_IOCTL_I915_GEM_GET_TILING failed.\n");
		return ret;
	}

	bo->tiling = gem_get_tiling.tiling_mode;
	return 0;
}

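/*
 * Maps untiled buffers with a regular CPU mapping (write-combined for
 * scanout) and tiled buffers through the GTT aperture, so the CPU gets a
 * linear view of the tiled layout.
 */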
static void *i915_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	void *addr;

	if (bo->tiling == I915_TILING_NONE) {
		struct drm_i915_gem_mmap gem_map;
		memset(&gem_map, 0, sizeof(gem_map));

		if ((bo->use_flags & BO_USE_SCANOUT) && !(bo->use_flags & BO_USE_RENDERSCRIPT))
			gem_map.flags = I915_MMAP_WC;

		gem_map.handle = bo->handles[0].u32;
		gem_map.offset = 0;
		gem_map.size = bo->total_size;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP, &gem_map);
		if (ret) {
			drv_log("DRM_IOCTL_I915_GEM_MMAP failed\n");
			return MAP_FAILED;
		}

		addr = (void *)(uintptr_t)gem_map.addr_ptr;
	} else {
		struct drm_i915_gem_mmap_gtt gem_map;
		memset(&gem_map, 0, sizeof(gem_map));

		gem_map.handle = bo->handles[0].u32;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP_GTT, &gem_map);
		if (ret) {
			drv_log("DRM_IOCTL_I915_GEM_MMAP_GTT failed\n");
			return MAP_FAILED;
		}

		addr = mmap(0, bo->total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
			    gem_map.offset);
	}

	if (addr == MAP_FAILED) {
		drv_log("i915 GEM mmap failed\n");
		return addr;
	}

	vma->length = bo->total_size;
	return addr;
}

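/*
 * Prepares a buffer for CPU access by moving it to the CPU domain (untiled)
 * or the GTT domain (tiled), letting the kernel handle any needed
 * synchronization.
 */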
static int i915_bo_invalidate(struct bo *bo, struct mapping *mapping)
{
	int ret;
	struct drm_i915_gem_set_domain set_domain;

	memset(&set_domain, 0, sizeof(set_domain));
	set_domain.handle = bo->handles[0].u32;
	if (bo->tiling == I915_TILING_NONE) {
		set_domain.read_domains = I915_GEM_DOMAIN_CPU;
		if (mapping->vma->map_flags & BO_MAP_WRITE)
			set_domain.write_domain = I915_GEM_DOMAIN_CPU;
	} else {
		set_domain.read_domains = I915_GEM_DOMAIN_GTT;
		if (mapping->vma->map_flags & BO_MAP_WRITE)
			set_domain.write_domain = I915_GEM_DOMAIN_GTT;
	}

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_DOMAIN, &set_domain);
	if (ret) {
		drv_log("DRM_IOCTL_I915_GEM_SET_DOMAIN failed with %d\n", ret);
		return ret;
	}

	return 0;
}

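/*
 * Flushes CPU writes back to memory. Only needed for untiled buffers on
 * non-LLC platforms, where the CPU mapping is not coherent with the GPU.
 */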
static int i915_bo_flush(struct bo *bo, struct mapping *mapping)
{
	struct i915_device *i915 = bo->drv->priv;
	if (!i915->has_llc && bo->tiling == I915_TILING_NONE)
		i915_clflush(mapping->vma->addr, mapping->vma->length);

	return 0;
}

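/*
 * Resolves Android's flexible formats to concrete DRM formats, preferring
 * NV12 for camera and video use cases.
 */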
static uint32_t i915_resolve_format(uint32_t format, uint64_t use_flags)
{
	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* KBL camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
			return DRM_FORMAT_NV12;
		/* HACK: See b/28671744 */
		return DRM_FORMAT_XBGR8888;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		/*
		 * KBL camera subsystem requires NV12. Our other use cases
		 * don't care:
		 * - Hardware video supports NV12,
		 * - USB Camera HALv3 supports NV12,
		 * - USB Camera HALv1 doesn't use this format.
		 * Moreover, NV12 is preferred for video, due to overlay
		 * support on SKL+.
		 */
		return DRM_FORMAT_NV12;
	default:
		return format;
	}
}

const struct backend backend_i915 = {
	.name = "i915",
	.init = i915_init,
	.close = i915_close,
	.bo_create = i915_bo_create,
	.bo_create_with_modifiers = i915_bo_create_with_modifiers,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = i915_bo_import,
	.bo_map = i915_bo_map,
	.bo_unmap = drv_bo_munmap,
	.bo_invalidate = i915_bo_invalidate,
	.bo_flush = i915_bo_flush,
	.resolve_format = i915_resolve_format,
};

#endif