/*
 * Copyright 2014 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifdef DRV_I915

#include <errno.h>
#include <i915_drm.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "helpers.h"
#include "util.h"

#define I915_CACHELINE_SIZE 64
#define I915_CACHELINE_MASK (I915_CACHELINE_SIZE - 1)

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB1555,
                                                   DRM_FORMAT_ARGB8888, DRM_FORMAT_RGB565,
                                                   DRM_FORMAT_XBGR2101010, DRM_FORMAT_XBGR8888,
                                                   DRM_FORMAT_XRGB1555, DRM_FORMAT_XRGB2101010,
                                                   DRM_FORMAT_XRGB8888 };

static const uint32_t tileable_texture_source_formats[] = { DRM_FORMAT_GR88, DRM_FORMAT_R8,
                                                             DRM_FORMAT_UYVY, DRM_FORMAT_YUYV };

static const uint32_t texture_source_formats[] = { DRM_FORMAT_YVU420, DRM_FORMAT_YVU420_ANDROID,
                                                   DRM_FORMAT_NV12 };

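/*
 * Per-driver private data: gen is derived from the PCI device ID in
 * i915_init(), and has_llc comes from I915_PARAM_HAS_LLC. i915_bo_flush()
 * uses has_llc to decide whether a manual cache-line flush is needed after
 * CPU writes to an untiled buffer.
 */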
struct i915_device {
        uint32_t gen;
        int32_t has_llc;
};

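/*
 * Returns a coarse hardware generation for the given PCI device ID. Only the
 * Gen3 IDs are listed explicitly; everything else is treated as Gen4 or
 * newer, which is all the alignment logic below needs to distinguish.
 */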
static uint32_t i915_get_gen(int device_id)
{
        const uint16_t gen3_ids[] = { 0x2582, 0x2592, 0x2772, 0x27A2, 0x27AE,
                                      0x29C2, 0x29B2, 0x29D2, 0xA001, 0xA011 };
        unsigned i;
        for (i = 0; i < ARRAY_SIZE(gen3_ids); i++)
                if (gen3_ids[i] == device_id)
                        return 3;

        return 4;
}

/*
 * We allow allocation of ARGB formats for SCANOUT if the corresponding XRGB
 * format supports it. It's up to the caller (Chrome Ozone) to ultimately not
 * scan out ARGB if the display controller only supports XRGB, but we allow
 * the allocation of the bo here.
 */
static bool format_compatible(const struct combination *combo, uint32_t format)
{
        if (combo->format == format)
                return true;

        switch (format) {
        case DRM_FORMAT_XRGB8888:
                return combo->format == DRM_FORMAT_ARGB8888;
        case DRM_FORMAT_XBGR8888:
                return combo->format == DRM_FORMAT_ABGR8888;
        case DRM_FORMAT_RGBX8888:
                return combo->format == DRM_FORMAT_RGBA8888;
        case DRM_FORMAT_BGRX8888:
                return combo->format == DRM_FORMAT_BGRA8888;
        default:
                return false;
        }
}

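/*
 * Merges one scanout capability reported by the KMS layer (a format plus
 * modifier plus use flags) into the driver's existing combination table.
 */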
static int i915_add_kms_item(struct driver *drv, const struct kms_item *item)
{
        uint32_t i;
        struct combination *combo;

        /*
         * Older hardware can't scan out Y-tiled formats. Newer devices can, and
         * report this functionality via format modifiers.
         */
        for (i = 0; i < drv_array_size(drv->combos); i++) {
                combo = (struct combination *)drv_array_at_idx(drv->combos, i);
                if (!format_compatible(combo, item->format))
                        continue;

                if (item->modifier == DRM_FORMAT_MOD_LINEAR &&
                    combo->metadata.tiling == I915_TILING_X) {
                        /*
                         * FIXME: drv_query_kms() does not report the available modifiers
                         * yet, but we know that all hardware can scan out from X-tiled
                         * buffers, so let's add this to our combinations, except for
                         * cursor, which must not be tiled.
                         */
                        combo->use_flags |= item->use_flags & ~BO_USE_CURSOR;
                }

                if (combo->metadata.modifier == item->modifier)
                        combo->use_flags |= item->use_flags;
        }

        return 0;
}

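/*
 * Builds the format/modifier/use-flag table for this driver: linear, X-tiled
 * and Y-tiled layouts are registered with increasing priority, software and
 * linear use flags are dropped from the tiled entries, and scanout support
 * reported by KMS is then merged in via i915_add_kms_item().
 */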
static int i915_add_combinations(struct driver *drv)
{
        int ret;
        uint32_t i;
        struct drv_array *kms_items;
        struct format_metadata metadata;
        uint64_t render_use_flags, texture_use_flags;

        render_use_flags = BO_USE_RENDER_MASK;
        texture_use_flags = BO_USE_TEXTURE_MASK;

        metadata.tiling = I915_TILING_NONE;
        metadata.priority = 1;
        metadata.modifier = DRM_FORMAT_MOD_LINEAR;

        drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
                             &metadata, render_use_flags);

        drv_add_combinations(drv, texture_source_formats, ARRAY_SIZE(texture_source_formats),
                             &metadata, texture_use_flags);

        drv_add_combinations(drv, tileable_texture_source_formats,
                             ARRAY_SIZE(tileable_texture_source_formats), &metadata,
                             texture_use_flags);

        drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
        drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);

        /* The IPU3 camera ISP supports only NV12 output. */
        drv_modify_combination(drv, DRM_FORMAT_NV12, &metadata,
                               BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
        /*
         * The R8 format backs Android's HAL_PIXEL_FORMAT_BLOB, which carries JPEG snapshots
         * from the camera.
         */
        drv_modify_combination(drv, DRM_FORMAT_R8, &metadata,
                               BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);

        render_use_flags &= ~BO_USE_RENDERSCRIPT;
        render_use_flags &= ~BO_USE_SW_WRITE_OFTEN;
        render_use_flags &= ~BO_USE_SW_READ_OFTEN;
        render_use_flags &= ~BO_USE_LINEAR;

        texture_use_flags &= ~BO_USE_RENDERSCRIPT;
        texture_use_flags &= ~BO_USE_SW_WRITE_OFTEN;
        texture_use_flags &= ~BO_USE_SW_READ_OFTEN;
        texture_use_flags &= ~BO_USE_LINEAR;

        metadata.tiling = I915_TILING_X;
        metadata.priority = 2;
        metadata.modifier = I915_FORMAT_MOD_X_TILED;

        drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
                             &metadata, render_use_flags);

        drv_add_combinations(drv, tileable_texture_source_formats,
                             ARRAY_SIZE(tileable_texture_source_formats), &metadata,
                             texture_use_flags);

        metadata.tiling = I915_TILING_Y;
        metadata.priority = 3;
        metadata.modifier = I915_FORMAT_MOD_Y_TILED;

        drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
                             &metadata, render_use_flags);

        drv_add_combinations(drv, tileable_texture_source_formats,
                             ARRAY_SIZE(tileable_texture_source_formats), &metadata,
                             texture_use_flags);

        kms_items = drv_query_kms(drv);
        if (!kms_items)
                return 0;

        for (i = 0; i < drv_array_size(kms_items); i++) {
                ret = i915_add_kms_item(drv, (struct kms_item *)drv_array_at_idx(kms_items, i));
                if (ret) {
                        drv_array_destroy(kms_items);
                        return ret;
                }
        }

        drv_array_destroy(kms_items);
        return 0;
}

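/*
 * Rounds the stride and height up to the tiling unit of the chosen layout:
 * 64-byte strides for linear buffers, 512x8 tiles for X tiling and 128x32
 * tiles for Y tiling on Gen4+ (Gen3 Y tiling uses the 512x8 geometry). Gen3
 * additionally requires power-of-two strides no larger than 8192 bytes.
 */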
static int i915_align_dimensions(struct bo *bo, uint32_t tiling, uint32_t *stride,
                                 uint32_t *aligned_height)
{
        struct i915_device *i915 = bo->drv->priv;
        uint32_t horizontal_alignment = 4;
        uint32_t vertical_alignment = 4;

        switch (tiling) {
        default:
        case I915_TILING_NONE:
                horizontal_alignment = 64;
                break;

        case I915_TILING_X:
                horizontal_alignment = 512;
                vertical_alignment = 8;
                break;

        case I915_TILING_Y:
                if (i915->gen == 3) {
                        horizontal_alignment = 512;
                        vertical_alignment = 8;
                } else {
                        horizontal_alignment = 128;
                        vertical_alignment = 32;
                }
                break;
        }

        /*
         * The alignment calculated above is based on the full-size luma plane. To keep the
         * chroma planes of subsampled formats properly aligned, we need to multiply the luma
         * alignment by the subsampling factor.
         */
        switch (bo->format) {
        case DRM_FORMAT_YVU420_ANDROID:
        case DRM_FORMAT_YVU420:
                horizontal_alignment *= 2;
                /* Fall through */
        case DRM_FORMAT_NV12:
                vertical_alignment *= 2;
                break;
        }

        *aligned_height = ALIGN(bo->height, vertical_alignment);
        if (i915->gen > 3) {
                *stride = ALIGN(*stride, horizontal_alignment);
        } else {
                while (*stride > horizontal_alignment)
                        horizontal_alignment <<= 1;

                *stride = horizontal_alignment;
        }

        if (i915->gen <= 3 && *stride > 8192)
                return -EINVAL;

        return 0;
}

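/*
 * Flushes the CPU cache lines covering [start, start + size): the start
 * address is rounded down to a cache-line boundary, and an mfence orders any
 * pending writes before the clflush loop. Used by i915_bo_flush() on
 * platforms without an LLC, where CPU writes are not otherwise coherent with
 * the GPU.
 */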
static void i915_clflush(void *start, size_t size)
{
        void *p = (void *)(((uintptr_t)start) & ~I915_CACHELINE_MASK);
        void *end = (void *)((uintptr_t)start + size);

        __builtin_ia32_mfence();
        while (p < end) {
                __builtin_ia32_clflush(p);
                p = (void *)((uintptr_t)p + I915_CACHELINE_SIZE);
        }
}

static int i915_init(struct driver *drv)
{
        int ret;
        int device_id;
        struct i915_device *i915;
        drm_i915_getparam_t get_param;

        i915 = calloc(1, sizeof(*i915));
        if (!i915)
                return -ENOMEM;

        memset(&get_param, 0, sizeof(get_param));
        get_param.param = I915_PARAM_CHIPSET_ID;
        get_param.value = &device_id;
        ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
        if (ret) {
                drv_log("Failed to get I915_PARAM_CHIPSET_ID\n");
                free(i915);
                return -EINVAL;
        }

        i915->gen = i915_get_gen(device_id);

        memset(&get_param, 0, sizeof(get_param));
        get_param.param = I915_PARAM_HAS_LLC;
        get_param.value = &i915->has_llc;
        ret = drmIoctl(drv->fd, DRM_IOCTL_I915_GETPARAM, &get_param);
        if (ret) {
                drv_log("Failed to get I915_PARAM_HAS_LLC\n");
                free(i915);
                return -EINVAL;
        }

        drv->priv = i915;

        return i915_add_combinations(drv);
}

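/*
 * Allocates a buffer object for an explicit format modifier: the modifier is
 * mapped to its GEM tiling mode, the stride and height are aligned for that
 * tiling, the backing storage is created with DRM_IOCTL_I915_GEM_CREATE and
 * the tiling is applied with DRM_IOCTL_I915_GEM_SET_TILING.
 */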
static int i915_bo_create_for_modifier(struct bo *bo, uint32_t width, uint32_t height,
                                       uint32_t format, uint64_t modifier)
{
        int ret;
        size_t plane;
        uint32_t stride;
        struct drm_i915_gem_create gem_create;
        struct drm_i915_gem_set_tiling gem_set_tiling;

        switch (modifier) {
        case DRM_FORMAT_MOD_LINEAR:
                bo->tiling = I915_TILING_NONE;
                break;
        case I915_FORMAT_MOD_X_TILED:
                bo->tiling = I915_TILING_X;
                break;
        case I915_FORMAT_MOD_Y_TILED:
                bo->tiling = I915_TILING_Y;
                break;
        }

        bo->format_modifiers[0] = modifier;

        stride = drv_stride_from_format(format, width, 0);

        ret = i915_align_dimensions(bo, bo->tiling, &stride, &height);
        if (ret)
                return ret;

        /*
         * HAL_PIXEL_FORMAT_YV12 requires that the buffer height not be aligned, but we need
         * to keep the total size as if the height were aligned, to ensure enough padding
         * space after each plane to satisfy GPU alignment requirements.
         *
         * We do it by first calling drv_bo_from_format() with the aligned height and
         * DRM_FORMAT_YVU420, which allows height alignment, saving the total size it
         * calculates and then calling it again with the requested parameters.
         *
         * This relies on the fact that the i965 driver uses separate surfaces for each plane
         * and that the contents of the padding bytes are not affected, as they are only used
         * to satisfy GPU cache requests.
         *
         * This is enforced by Mesa in src/intel/isl/isl_gen8.c, inside
         * isl_gen8_choose_image_alignment_el(), which is used for GEN9 and GEN8.
         */
        if (format == DRM_FORMAT_YVU420_ANDROID) {
                uint32_t unaligned_height = bo->height;
                size_t total_size;

                drv_bo_from_format(bo, stride, height, DRM_FORMAT_YVU420);
                total_size = bo->total_size;
                drv_bo_from_format(bo, stride, unaligned_height, format);
                bo->total_size = total_size;
        } else {
                drv_bo_from_format(bo, stride, height, format);
        }

        /*
         * Quoting the Mesa ISL library:
         *
         * - For linear surfaces, additional padding of 64 bytes is required at
         *   the bottom of the surface. This is in addition to the padding
         *   required above.
         */
        if (bo->tiling == I915_TILING_NONE)
                bo->total_size += 64;

        memset(&gem_create, 0, sizeof(gem_create));
        gem_create.size = bo->total_size;

        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_CREATE, &gem_create);
        if (ret) {
                drv_log("DRM_IOCTL_I915_GEM_CREATE failed (size=%llu)\n", gem_create.size);
                return ret;
        }

        for (plane = 0; plane < bo->num_planes; plane++)
                bo->handles[plane].u32 = gem_create.handle;

        memset(&gem_set_tiling, 0, sizeof(gem_set_tiling));
        gem_set_tiling.handle = bo->handles[0].u32;
        gem_set_tiling.tiling_mode = bo->tiling;
        gem_set_tiling.stride = bo->strides[0];

        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_TILING, &gem_set_tiling);
        if (ret) {
                struct drm_gem_close gem_close;
                memset(&gem_close, 0, sizeof(gem_close));
                gem_close.handle = bo->handles[0].u32;
                drmIoctl(bo->drv->fd, DRM_IOCTL_GEM_CLOSE, &gem_close);

                drv_log("DRM_IOCTL_I915_GEM_SET_TILING failed with %d\n", errno);
                return -errno;
        }

        return 0;
}

static int i915_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
                          uint64_t use_flags)
{
        struct combination *combo;

        combo = drv_get_combination(bo->drv, format, use_flags);
        if (!combo)
                return -EINVAL;

        return i915_bo_create_for_modifier(bo, width, height, format, combo->metadata.modifier);
}

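/*
 * When the caller supplies an explicit modifier list, prefer the most capable
 * tiling present in it: Y-tiled first, then X-tiled, then linear.
 */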
static int i915_bo_create_with_modifiers(struct bo *bo, uint32_t width, uint32_t height,
                                         uint32_t format, const uint64_t *modifiers, uint32_t count)
{
        static const uint64_t modifier_order[] = {
                I915_FORMAT_MOD_Y_TILED,
                I915_FORMAT_MOD_X_TILED,
                DRM_FORMAT_MOD_LINEAR,
        };
        uint64_t modifier;

        modifier = drv_pick_modifier(modifiers, count, modifier_order, ARRAY_SIZE(modifier_order));

        return i915_bo_create_for_modifier(bo, width, height, format, modifier);
}

static void i915_close(struct driver *drv)
{
        free(drv->priv);
        drv->priv = NULL;
}

static int i915_bo_import(struct bo *bo, struct drv_import_fd_data *data)
{
        int ret;
        struct drm_i915_gem_get_tiling gem_get_tiling;

        ret = drv_prime_bo_import(bo, data);
        if (ret)
                return ret;

        /* TODO(gsingh): export modifiers and get rid of backdoor tiling. */
        memset(&gem_get_tiling, 0, sizeof(gem_get_tiling));
        gem_get_tiling.handle = bo->handles[0].u32;

        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_GET_TILING, &gem_get_tiling);
        if (ret) {
                drv_gem_bo_destroy(bo);
                drv_log("DRM_IOCTL_I915_GEM_GET_TILING failed.\n");
                return ret;
        }

        bo->tiling = gem_get_tiling.tiling_mode;
        return 0;
}

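/*
 * Maps the buffer for CPU access. Untiled buffers are mapped directly with
 * DRM_IOCTL_I915_GEM_MMAP (write-combined for scanout buffers); tiled buffers
 * are mapped through the GTT aperture with DRM_IOCTL_I915_GEM_MMAP_GTT so the
 * hardware detiles accesses transparently.
 */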
static void *i915_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
        int ret;
        void *addr;

        if (bo->tiling == I915_TILING_NONE) {
                struct drm_i915_gem_mmap gem_map;
                memset(&gem_map, 0, sizeof(gem_map));

                if ((bo->use_flags & BO_USE_SCANOUT) && !(bo->use_flags & BO_USE_RENDERSCRIPT))
                        gem_map.flags = I915_MMAP_WC;

                gem_map.handle = bo->handles[0].u32;
                gem_map.offset = 0;
                gem_map.size = bo->total_size;

                ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP, &gem_map);
                if (ret) {
                        drv_log("DRM_IOCTL_I915_GEM_MMAP failed\n");
                        return MAP_FAILED;
                }

                addr = (void *)(uintptr_t)gem_map.addr_ptr;
        } else {
                struct drm_i915_gem_mmap_gtt gem_map;
                memset(&gem_map, 0, sizeof(gem_map));

                gem_map.handle = bo->handles[0].u32;

                ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_MMAP_GTT, &gem_map);
                if (ret) {
                        drv_log("DRM_IOCTL_I915_GEM_MMAP_GTT failed\n");
                        return MAP_FAILED;
                }

                addr = mmap(0, bo->total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
                            gem_map.offset);
        }

        if (addr == MAP_FAILED) {
                drv_log("i915 GEM mmap failed\n");
                return addr;
        }

        vma->length = bo->total_size;
        return addr;
}

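/*
 * Moves the buffer into the CPU domain (or the GTT domain for tiled buffers)
 * before CPU access, so the kernel can synchronize caches and wait for any
 * pending GPU work touching the object.
 */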
static int i915_bo_invalidate(struct bo *bo, struct mapping *mapping)
{
        int ret;
        struct drm_i915_gem_set_domain set_domain;

        memset(&set_domain, 0, sizeof(set_domain));
        set_domain.handle = bo->handles[0].u32;
        if (bo->tiling == I915_TILING_NONE) {
                set_domain.read_domains = I915_GEM_DOMAIN_CPU;
                if (mapping->vma->map_flags & BO_MAP_WRITE)
                        set_domain.write_domain = I915_GEM_DOMAIN_CPU;
        } else {
                set_domain.read_domains = I915_GEM_DOMAIN_GTT;
                if (mapping->vma->map_flags & BO_MAP_WRITE)
                        set_domain.write_domain = I915_GEM_DOMAIN_GTT;
        }

        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_I915_GEM_SET_DOMAIN, &set_domain);
        if (ret) {
                drv_log("DRM_IOCTL_I915_GEM_SET_DOMAIN failed with %d\n", ret);
                return ret;
        }

        return 0;
}

static int i915_bo_flush(struct bo *bo, struct mapping *mapping)
{
        struct i915_device *i915 = bo->drv->priv;
        if (!i915->has_llc && bo->tiling == I915_TILING_NONE)
                i915_clflush(mapping->vma->addr, mapping->vma->length);

        return 0;
}

static uint32_t i915_resolve_format(uint32_t format, uint64_t use_flags)
{
        switch (format) {
        case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
                /* KBL camera subsystem requires NV12. */
                if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
                        return DRM_FORMAT_NV12;
                /* HACK: See b/28671744 */
                return DRM_FORMAT_XBGR8888;
        case DRM_FORMAT_FLEX_YCbCr_420_888:
                /*
                 * KBL camera subsystem requires NV12. Our other use cases
                 * don't care:
                 * - Hardware video supports NV12,
                 * - USB Camera HALv3 supports NV12,
                 * - USB Camera HALv1 doesn't use this format.
                 * Moreover, NV12 is preferred for video, due to overlay
                 * support on SKL+.
                 */
                return DRM_FORMAT_NV12;
        default:
                return format;
        }
}

const struct backend backend_i915 = {
        .name = "i915",
        .init = i915_init,
        .close = i915_close,
        .bo_create = i915_bo_create,
        .bo_create_with_modifiers = i915_bo_create_with_modifiers,
        .bo_destroy = drv_gem_bo_destroy,
        .bo_import = i915_bo_import,
        .bo_map = i915_bo_map,
        .bo_unmap = drv_bo_munmap,
        .bo_invalidate = i915_bo_invalidate,
        .bo_flush = i915_bo_flush,
        .resolve_format = i915_resolve_format,
};

#endif