/*
 * Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include <assert.h>
#include <errno.h>
#include <stdatomic.h>
#include <stdint.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "external/virgl_hw.h"
#include "external/virgl_protocol.h"
#include "external/virtgpu_drm.h"
#include "helpers.h"
#include "util.h"
#include "virtgpu.h"

#define PIPE_TEXTURE_2D 2

#define MESA_LLVMPIPE_TILE_ORDER 6
#define MESA_LLVMPIPE_TILE_SIZE (1 << MESA_LLVMPIPE_TILE_ORDER)

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
						  DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
						  DRM_FORMAT_XRGB8888 };

static const uint32_t dumb_texture_source_formats[] = {
	DRM_FORMAT_R8, DRM_FORMAT_R16, DRM_FORMAT_YVU420,
	DRM_FORMAT_NV12, DRM_FORMAT_NV21, DRM_FORMAT_YVU420_ANDROID,
	DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

static const uint32_t texture_source_formats[] = {
	DRM_FORMAT_NV12, DRM_FORMAT_NV21, DRM_FORMAT_R8, DRM_FORMAT_R16,
	DRM_FORMAT_RG88, DRM_FORMAT_YVU420_ANDROID, DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_ABGR16161616F
};

extern struct virtgpu_param params[];

struct virgl_priv {
	int caps_is_v2;
	union virgl_caps caps;
	int host_gbm_enabled;
	atomic_int next_blob_id;
};

static uint32_t translate_format(uint32_t drm_fourcc)
{
	switch (drm_fourcc) {
	case DRM_FORMAT_BGR888:
	case DRM_FORMAT_RGB888:
		return VIRGL_FORMAT_R8G8B8_UNORM;
	case DRM_FORMAT_XRGB8888:
		return VIRGL_FORMAT_B8G8R8X8_UNORM;
	case DRM_FORMAT_ARGB8888:
		return VIRGL_FORMAT_B8G8R8A8_UNORM;
	case DRM_FORMAT_XBGR8888:
		return VIRGL_FORMAT_R8G8B8X8_UNORM;
	case DRM_FORMAT_ABGR8888:
		return VIRGL_FORMAT_R8G8B8A8_UNORM;
	case DRM_FORMAT_ABGR16161616F:
		return VIRGL_FORMAT_R16G16B16A16_FLOAT;
	case DRM_FORMAT_ABGR2101010:
		return VIRGL_FORMAT_R10G10B10A2_UNORM;
	case DRM_FORMAT_RGB565:
		return VIRGL_FORMAT_B5G6R5_UNORM;
	case DRM_FORMAT_R8:
		return VIRGL_FORMAT_R8_UNORM;
	case DRM_FORMAT_R16:
		return VIRGL_FORMAT_R16_UNORM;
	case DRM_FORMAT_RG88:
		return VIRGL_FORMAT_R8G8_UNORM;
	case DRM_FORMAT_NV12:
		return VIRGL_FORMAT_NV12;
	case DRM_FORMAT_NV21:
		return VIRGL_FORMAT_NV21;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		return VIRGL_FORMAT_YV12;
	default:
		drv_log("Unhandled format:%d\n", drm_fourcc);
		return 0;
	}
}

static bool virgl_bitmask_supports_format(struct virgl_supported_format_mask *supported,
					  uint32_t drm_format)
{
	uint32_t virgl_format = translate_format(drm_format);
	if (!virgl_format)
		return false;

	uint32_t bitmask_index = virgl_format / 32;
	uint32_t bit_index = virgl_format % 32;
	return supported->bitmask[bitmask_index] & (1 << bit_index);
}
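
/*
 * Worked example of the lookup above (the enum value is illustrative, not
 * taken from virgl_hw.h): a virgl format numbered 70 lands in
 * bitmask[70 / 32] = bitmask[2] at bit 70 % 32 = 6, so the check reduces to
 * supported->bitmask[2] & (1 << 6).
 */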

// The metadata generated here for emulated buffers is slightly different from the metadata
// generated by drv_bo_from_format. In order to simplify transfers in the flush and invalidate
// functions below, the emulated buffers are oversized. For example, ignoring stride alignment
// requirements to demonstrate, a 6x6 YUV420 image buffer might have the following layout from
// drv_bo_from_format:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U | U | U | U |
// | U | U | U | V | V | V |
// | V | V | V | V | V | V |
//
// where each plane immediately follows the previous plane in memory. This layout makes it
// difficult to compute the transfers needed, for example, when the middle 2x2 region of the
// image is locked and needs to be flushed/invalidated.
//
// Emulated multi-plane buffers instead have a layout of:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
//
// where each plane is placed as a sub-image (albeit with a very large stride) in order to
// simplify transfers into 3 sub-image transfers for the above example. (A worked numeric
// example follows virgl_get_emulated_metadata() below.)
//
// Additional note: the V-plane is not placed to the right of the U-plane due to some
// observed failures in media framework code which assumes the V-plane is not
// "row-interlaced" with the U-plane.
static void virgl_get_emulated_metadata(const struct bo *bo, struct bo_metadata *metadata)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	uint32_t original_width = bo->meta.width;
	uint32_t original_height = bo->meta.height;

	metadata->format = DRM_FORMAT_R8;
	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		metadata->num_planes = 2;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = original_width;
		metadata->height = y_plane_height + c_plane_height;

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * y_plane_height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		metadata->num_planes = 3;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = ALIGN(original_width, 32);
		metadata->height = y_plane_height + (2 * c_plane_height);

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * original_height;

		// Cb-plane (half resolution, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		// Cr-plane (half resolution, placed below Cb-plane)
		metadata->strides[2] = metadata->width;
		metadata->offsets[2] = metadata->offsets[1] + metadata->sizes[1];
		metadata->sizes[2] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	default:
		break;
	}
}
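
/*
 * Worked example (illustrative numbers) of virgl_get_emulated_metadata() for
 * a 6x6 DRM_FORMAT_YVU420 buffer: width = ALIGN(6, 32) = 32,
 * y_plane_height = 6, c_plane_height = DIV_ROUND_UP(6, 2) = 3, so
 * height = 6 + 2 * 3 = 12. All three strides are 32, offsets are
 * { 0, 192, 288 }, sizes are { 192, 96, 96 }, and total_size = 32 * 12 = 384
 * bytes.
 */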

struct virtio_transfers_params {
	size_t xfers_needed;
	struct rectangle xfer_boxes[DRV_MAX_PLANES];
};

static void virgl_get_emulated_transfers_params(const struct bo *bo,
						const struct rectangle *transfer_box,
						struct virtio_transfers_params *xfer_params)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	struct bo_metadata emulated_metadata;

	if (transfer_box->x == 0 && transfer_box->y == 0 && transfer_box->width == bo->meta.width &&
	    transfer_box->height == bo->meta.height) {
		virgl_get_emulated_metadata(bo, &emulated_metadata);

		xfer_params->xfers_needed = 1;
		xfer_params->xfer_boxes[0].x = 0;
		xfer_params->xfer_boxes[0].y = 0;
		xfer_params->xfer_boxes[0].width = emulated_metadata.width;
		xfer_params->xfer_boxes[0].height = emulated_metadata.height;

		return;
	}

	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		xfer_params->xfers_needed = 2;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = transfer_box->width;
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		xfer_params->xfers_needed = 3;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// Cb-plane (half resolution, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		// Cr-plane (half resolution, placed below Cb-plane)
		xfer_params->xfer_boxes[2].x = transfer_box->x;
		xfer_params->xfer_boxes[2].y = transfer_box->y + y_plane_height + c_plane_height;
		xfer_params->xfer_boxes[2].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[2].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	}
}
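
/*
 * Worked example (illustrative numbers) of the transfer splitting above:
 * locking the middle 2x2 region of a 6x6 DRM_FORMAT_NV12 buffer, i.e.
 * transfer_box = { .x = 2, .y = 2, .width = 2, .height = 2 }, yields two
 * boxes: { 2, 2, 2, 2 } for the Y-plane and { 2, 2 + 6, 2, 1 } for the CbCr
 * plane, whose rows start y_plane_height = 6 rows down and cover half the
 * height.
 */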

static bool virgl_supports_combination_natively(struct driver *drv, uint32_t drm_format,
						uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	if (priv->caps.max_version == 0)
		return true;

	if ((use_flags & BO_USE_RENDERING) &&
	    !virgl_bitmask_supports_format(&priv->caps.v1.render, drm_format))
		return false;

	if ((use_flags & BO_USE_TEXTURE) &&
	    !virgl_bitmask_supports_format(&priv->caps.v1.sampler, drm_format))
		return false;

	if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
	    !virgl_bitmask_supports_format(&priv->caps.v2.scanout, drm_format))
		return false;

	return true;
}

// For virtio backends that do not support formats natively (e.g. multi-planar formats are not
// supported in virglrenderer when gbm is unavailable on the host machine), this checks whether
// the format and usage combination can be handled as a blob (byte buffer).
static bool virgl_supports_combination_through_emulation(struct driver *drv, uint32_t drm_format,
							 uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	// Only enable emulation on non-gbm virtio backends.
	if (priv->host_gbm_enabled)
		return false;

	if (use_flags & (BO_USE_RENDERING | BO_USE_SCANOUT))
		return false;

	if (!virgl_supports_combination_natively(drv, DRM_FORMAT_R8, use_flags))
		return false;

	return drm_format == DRM_FORMAT_NV12 || drm_format == DRM_FORMAT_NV21 ||
	       drm_format == DRM_FORMAT_YVU420 || drm_format == DRM_FORMAT_YVU420_ANDROID;
}

// Adds the given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combination(struct driver *drv, uint32_t drm_format,
				  struct format_metadata *metadata, uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	if (params[param_3d].value && priv->caps.max_version >= 1) {
		if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
		    !virgl_supports_combination_natively(drv, drm_format, use_flags)) {
			drv_log("Stripping scanout on unsupported format: %d\n", drm_format);
			use_flags &= ~BO_USE_SCANOUT;
		}

		if (!virgl_supports_combination_natively(drv, drm_format, use_flags) &&
		    !virgl_supports_combination_through_emulation(drv, drm_format, use_flags)) {
			drv_log("Skipping unsupported combination format:%d\n", drm_format);
			return;
		}
	}

	drv_add_combination(drv, drm_format, metadata, use_flags);
}

// Adds each given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combinations(struct driver *drv, const uint32_t *drm_formats,
				   uint32_t num_formats, struct format_metadata *metadata,
				   uint64_t use_flags)
{
	uint32_t i;

	for (i = 0; i < num_formats; i++)
		virgl_add_combination(drv, drm_formats[i], metadata, use_flags);
}

static int virtio_dumb_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
				 uint64_t use_flags)
{
	if (bo->meta.format != DRM_FORMAT_R8) {
		width = ALIGN(width, MESA_LLVMPIPE_TILE_SIZE);
		height = ALIGN(height, MESA_LLVMPIPE_TILE_SIZE);
	}

	return drv_dumb_bo_create_ex(bo, width, height, format, use_flags, BO_QUIRK_DUMB32BPP);
}

static inline void handle_flag(uint64_t *flag, uint64_t check_flag, uint32_t *bind,
			       uint32_t virgl_bind)
{
	if ((*flag) & check_flag) {
		(*flag) &= ~check_flag;
		(*bind) |= virgl_bind;
	}
}

static uint32_t compute_virgl_bind_flags(uint64_t use_flags, uint32_t format)
{
	/* In crosvm, VIRGL_BIND_SHARED means minigbm will allocate, not virglrenderer. */
	uint32_t bind = VIRGL_BIND_SHARED;

	handle_flag(&use_flags, BO_USE_TEXTURE, &bind, VIRGL_BIND_SAMPLER_VIEW);
	handle_flag(&use_flags, BO_USE_RENDERING, &bind, VIRGL_BIND_RENDER_TARGET);
	handle_flag(&use_flags, BO_USE_SCANOUT, &bind, VIRGL_BIND_SCANOUT);
	handle_flag(&use_flags, BO_USE_CURSOR, &bind, VIRGL_BIND_CURSOR);
	handle_flag(&use_flags, BO_USE_LINEAR, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_GPU_DATA_BUFFER, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_FRONT_RENDERING, &bind, VIRGL_BIND_LINEAR);

	if (use_flags & BO_USE_PROTECTED) {
		handle_flag(&use_flags, BO_USE_PROTECTED, &bind, VIRGL_BIND_MINIGBM_PROTECTED);
	} else {
		// Make sure we don't set both flags, since that could be mistaken for
		// protected. Give OFTEN priority over RARELY.
		if (use_flags & BO_USE_SW_READ_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_READ_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_READ_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_RARELY);
		}
		if (use_flags & BO_USE_SW_WRITE_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_WRITE_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_WRITE_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_RARELY);
		}
	}

	handle_flag(&use_flags, BO_USE_CAMERA_WRITE, &bind, VIRGL_BIND_MINIGBM_CAMERA_WRITE);
	handle_flag(&use_flags, BO_USE_CAMERA_READ, &bind, VIRGL_BIND_MINIGBM_CAMERA_READ);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_DECODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_DECODER);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_ENCODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_ENCODER);

	if (use_flags)
		drv_log("Unhandled bo use flag: %llx\n", (unsigned long long)use_flags);

	return bind;
}
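
/*
 * Illustrative example of the translation above (not an exhaustive mapping):
 * use_flags = BO_USE_TEXTURE | BO_USE_SW_READ_OFTEN resolves to
 * VIRGL_BIND_SHARED | VIRGL_BIND_SAMPLER_VIEW |
 * VIRGL_BIND_MINIGBM_SW_READ_OFTEN. handle_flag() clears each bit it
 * translates, so anything left in use_flags afterwards is logged as
 * unhandled.
 */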

static int virgl_3d_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			      uint64_t use_flags)
{
	int ret;
	size_t i;
	uint32_t stride;
	struct drm_virtgpu_resource_create res_create = { 0 };
	struct bo_metadata emulated_metadata;

	if (virgl_supports_combination_natively(bo->drv, format, use_flags)) {
		stride = drv_stride_from_format(format, width, 0);
		drv_bo_from_format(bo, stride, height, format);
	} else {
		assert(virgl_supports_combination_through_emulation(bo->drv, format, use_flags));

		virgl_get_emulated_metadata(bo, &emulated_metadata);

		format = emulated_metadata.format;
		width = emulated_metadata.width;
		height = emulated_metadata.height;
		for (i = 0; i < emulated_metadata.num_planes; i++) {
			bo->meta.strides[i] = emulated_metadata.strides[i];
			bo->meta.offsets[i] = emulated_metadata.offsets[i];
			bo->meta.sizes[i] = emulated_metadata.sizes[i];
		}
		bo->meta.total_size = emulated_metadata.total_size;
	}

	/*
	 * Setting the target is intended to ensure this resource gets bound as a 2D
	 * texture in the host renderer's GL state. All of these resource properties are
	 * sent unchanged by the kernel to the host, which in turn sends them unchanged to
	 * virglrenderer. When virglrenderer makes a resource, it will convert the target
	 * enum to the equivalent one in GL and then bind the resource to that target.
	 */

	res_create.target = PIPE_TEXTURE_2D;
	res_create.format = translate_format(format);
	res_create.bind = compute_virgl_bind_flags(use_flags, format);
	res_create.width = width;
	res_create.height = height;

	/* For virgl 3D */
	res_create.depth = 1;
	res_create.array_size = 1;
	res_create.last_level = 0;
	res_create.nr_samples = 0;

	res_create.size = ALIGN(bo->meta.total_size, PAGE_SIZE); // PAGE_SIZE = 0x1000
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = res_create.bo_handle;

	return 0;
}

static void *virgl_3d_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	struct drm_virtgpu_map gem_map = { 0 };

	gem_map.handle = bo->handles[0].u32;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_MAP, &gem_map);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_MAP failed with %s\n", strerror(errno));
		return MAP_FAILED;
	}

	vma->length = bo->meta.total_size;
	return mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
		    gem_map.offset);
}

static int virgl_get_caps(struct driver *drv, union virgl_caps *caps, int *caps_is_v2)
{
	int ret;
	struct drm_virtgpu_get_caps cap_args = { 0 };

	*caps_is_v2 = 0;
	cap_args.addr = (unsigned long long)caps;
	if (params[param_capset_fix].value) {
		*caps_is_v2 = 1;
		cap_args.cap_set_id = 2;
		cap_args.size = sizeof(union virgl_caps);
	} else {
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);
	}

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
		*caps_is_v2 = 0;

		// Fallback to v1
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);

		ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
		if (ret)
			drv_log("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
	}

	return ret;
}

static void virgl_init_params_and_caps(struct driver *drv)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;
	if (params[param_3d].value) {
		virgl_get_caps(drv, &priv->caps, &priv->caps_is_v2);

		// We use two criteria to determine whether host minigbm is used for
		// swapchain allocations.
		//
		// (1) Host minigbm is only available via virglrenderer, and only virglrenderer
		// advertises capabilities.
		// (2) Only host minigbm doesn't emulate YUV formats, so native NV12 support is
		// a proxy for it, albeit an indirect one.
		priv->host_gbm_enabled =
		    priv->caps.max_version > 0 &&
		    virgl_supports_combination_natively(drv, DRM_FORMAT_NV12, BO_USE_TEXTURE);
	}
}

static int virgl_init(struct driver *drv)
{
	struct virgl_priv *priv;

	priv = calloc(1, sizeof(*priv));
	drv->priv = priv;

	virgl_init_params_and_caps(drv);

	if (params[param_3d].value) {
		/* This doesn't mean the host can scan out everything, it just means the host
		 * hypervisor can show it. */
		virgl_add_combinations(drv, render_target_formats,
				       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
				       BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		virgl_add_combinations(drv, texture_source_formats,
				       ARRAY_SIZE(texture_source_formats), &LINEAR_METADATA,
				       BO_USE_TEXTURE_MASK);
		drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER |
					   BO_USE_SCANOUT);
	} else {
		/* The virtio primary plane only allows this format. */
		virgl_add_combination(drv, DRM_FORMAT_XRGB8888, &LINEAR_METADATA,
				      BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		/* The virtio cursor plane only allows this format, and Chrome cannot live
		 * without ARGB8888 as a renderable format. */
		virgl_add_combination(drv, DRM_FORMAT_ARGB8888, &LINEAR_METADATA,
				      BO_USE_RENDER_MASK | BO_USE_CURSOR);
		/* Android needs more formats, but they can no longer be bound as scanouts
		 * after "drm/virtio: fix DRM_FORMAT_* handling". */
		virgl_add_combinations(drv, render_target_formats,
				       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
				       BO_USE_RENDER_MASK);
		virgl_add_combinations(drv, dumb_texture_source_formats,
				       ARRAY_SIZE(dumb_texture_source_formats), &LINEAR_METADATA,
				       BO_USE_TEXTURE_MASK);
		drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	/* Android CTS tests require this. */
	virgl_add_combination(drv, DRM_FORMAT_RGB888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virgl_add_combination(drv, DRM_FORMAT_BGR888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virgl_add_combination(drv, DRM_FORMAT_P010, &LINEAR_METADATA,
			      BO_USE_SW_MASK | BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
	drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
				   BO_USE_HW_VIDEO_ENCODER | BO_USE_GPU_DATA_BUFFER);

	if (!priv->host_gbm_enabled) {
		drv_modify_combination(drv, DRM_FORMAT_ABGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_R16, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420_ANDROID, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	return drv_modify_linear_combinations(drv);
}

static void virgl_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

static int virgl_bo_create_blob(struct driver *drv, struct bo *bo)
{
	int ret;
	uint32_t stride;
	uint32_t cur_blob_id;
	uint32_t cmd[VIRGL_PIPE_RES_CREATE_SIZE + 1] = { 0 };
	struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 };
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	uint32_t blob_flags = VIRTGPU_BLOB_FLAG_USE_SHAREABLE;
	if (bo->meta.use_flags & BO_USE_SW_MASK)
		blob_flags |= VIRTGPU_BLOB_FLAG_USE_MAPPABLE;

	// For now, all blob use cases are cross device. When we add wider
	// support for blobs, we can revisit making this unconditional.
	blob_flags |= VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE;

	cur_blob_id = atomic_fetch_add(&priv->next_blob_id, 1);
	stride = drv_stride_from_format(bo->meta.format, bo->meta.width, 0);
	drv_bo_from_format(bo, stride, bo->meta.height, bo->meta.format);
	bo->meta.total_size = ALIGN(bo->meta.total_size, PAGE_SIZE);
	bo->meta.tiling = blob_flags;

	cmd[0] = VIRGL_CMD0(VIRGL_CCMD_PIPE_RESOURCE_CREATE, 0, VIRGL_PIPE_RES_CREATE_SIZE);
	cmd[VIRGL_PIPE_RES_CREATE_TARGET] = PIPE_TEXTURE_2D;
	cmd[VIRGL_PIPE_RES_CREATE_WIDTH] = bo->meta.width;
	cmd[VIRGL_PIPE_RES_CREATE_HEIGHT] = bo->meta.height;
	cmd[VIRGL_PIPE_RES_CREATE_FORMAT] = translate_format(bo->meta.format);
	cmd[VIRGL_PIPE_RES_CREATE_BIND] =
	    compute_virgl_bind_flags(bo->meta.use_flags, bo->meta.format);
	cmd[VIRGL_PIPE_RES_CREATE_DEPTH] = 1;
	cmd[VIRGL_PIPE_RES_CREATE_BLOB_ID] = cur_blob_id;

	drm_rc_blob.cmd = (uint64_t)&cmd;
	drm_rc_blob.cmd_size = 4 * (VIRGL_PIPE_RES_CREATE_SIZE + 1);
	drm_rc_blob.size = bo->meta.total_size;
	drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST3D;
	drm_rc_blob.blob_flags = blob_flags;
	drm_rc_blob.blob_id = cur_blob_id;

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob);
	if (ret < 0) {
		drv_log("DRM_VIRTGPU_RESOURCE_CREATE_BLOB failed with %s\n", strerror(errno));
		return -errno;
	}

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = drm_rc_blob.bo_handle;

	return 0;
}
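
/*
 * A note on the command buffer built above: VIRGL_CMD0 (from
 * virgl_protocol.h) packs the command id, object type, and payload length
 * into a single header dword, roughly (cmd) | ((obj) << 8) | ((len) << 16).
 * The submission is that header plus VIRGL_PIPE_RES_CREATE_SIZE payload
 * dwords, which is why cmd_size is 4 * (VIRGL_PIPE_RES_CREATE_SIZE + 1)
 * bytes.
 */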

static bool should_use_blob(struct driver *drv, uint32_t format, uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	// TODO(gurchetansingh): remove once all minigbm users are blob-safe
#ifndef VIRTIO_GPU_NEXT
	return false;
#endif

	// Only use blob when host gbm is available
	if (!priv->host_gbm_enabled)
		return false;

	// Use regular resources if only the GPU needs efficient access. Blob resource is a better
	// fit for BO_USE_GPU_DATA_BUFFER which is mapped to VIRGL_BIND_LINEAR.
	if (!(use_flags & (BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN | BO_USE_LINEAR |
			   BO_USE_NON_GPU_HW | BO_USE_GPU_DATA_BUFFER)))
		return false;

	switch (format) {
	case DRM_FORMAT_R8:
		// Formats with strictly defined strides are supported
		return true;
	case DRM_FORMAT_YVU420_ANDROID:
	case DRM_FORMAT_NV12:
		// Knowing buffer metadata at buffer creation isn't yet supported, so buffers
		// can't be properly mapped into the guest.
		return (use_flags & BO_USE_SW_MASK) == 0;
	default:
		return false;
	}
}

static int virgl_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			   uint64_t use_flags)
{
	if (params[param_resource_blob].value && params[param_host_visible].value &&
	    should_use_blob(bo->drv, format, use_flags))
		return virgl_bo_create_blob(bo->drv, bo);

	if (params[param_3d].value)
		return virgl_3d_bo_create(bo, width, height, format, use_flags);
	else
		return virtio_dumb_bo_create(bo, width, height, format, use_flags);
}

static int virgl_bo_destroy(struct bo *bo)
{
	if (params[param_3d].value)
		return drv_gem_bo_destroy(bo);
	else
		return drv_dumb_bo_destroy(bo);
}

static void *virgl_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	if (params[param_3d].value)
		return virgl_3d_bo_map(bo, vma, plane, map_flags);
	else
		return drv_dumb_bo_map(bo, vma, plane, map_flags);
}

static int virgl_bo_invalidate(struct bo *bo, struct mapping *mapping)
{
	int ret;
	size_t i;
	struct drm_virtgpu_3d_transfer_from_host xfer = { 0 };
	struct drm_virtgpu_3d_wait waitcmd = { 0 };
	struct virtio_transfers_params xfer_params;
	struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;
	uint64_t host_write_flags;

	if (!params[param_3d].value)
		return 0;

	// Invalidate is only necessary if the host writes to the buffer. The encoder and
	// decoder flags don't differentiate between input and output buffers, but we can
	// use the format to determine whether this buffer could be encoder/decoder output.
	host_write_flags = BO_USE_RENDERING | BO_USE_CAMERA_WRITE;
	if (bo->meta.format == DRM_FORMAT_R8)
		host_write_flags |= BO_USE_HW_VIDEO_ENCODER;
	else
		host_write_flags |= BO_USE_HW_VIDEO_DECODER;

	if ((bo->meta.use_flags & host_write_flags) == 0)
		return 0;

	if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
		return 0;

	xfer.bo_handle = mapping->vma->handle;

	if (mapping->rect.x || mapping->rect.y) {
		/*
		 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
		 * images
		 */
		if (bo->meta.num_planes == 1) {
			xfer.offset =
			    (bo->meta.strides[0] * mapping->rect.y) +
			    drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
		}
	}

	if ((bo->meta.use_flags & BO_USE_RENDERING) == 0) {
		// Unfortunately, the kernel doesn't actually pass the guest layer_stride
		// and guest stride to the host (compare virgl.h and virtgpu_drm.h).
		// For gbm based resources, we can work around this by using the level field
		// to pass the stride to virglrenderer's gbm transfer code. However, we need
		// to avoid doing this for resources which don't rely on that transfer code,
		// which is resources with the BO_USE_RENDERING flag set.
		// TODO(b/145993887): also send the stride once the kernel patches land
		if (priv->host_gbm_enabled)
			xfer.level = bo->meta.strides[0];
	}

	if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
		xfer_params.xfers_needed = 1;
		xfer_params.xfer_boxes[0] = mapping->rect;
	} else {
		assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
								    bo->meta.use_flags));

		virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
	}

	for (i = 0; i < xfer_params.xfers_needed; i++) {
		xfer.box.x = xfer_params.xfer_boxes[i].x;
		xfer.box.y = xfer_params.xfer_boxes[i].y;
		xfer.box.w = xfer_params.xfer_boxes[i].width;
		xfer.box.h = xfer_params.xfer_boxes[i].height;
		xfer.box.d = 1;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST, &xfer);
		if (ret) {
			drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST failed with %s\n",
				strerror(errno));
			return -errno;
		}
	}

	// The transfer needs to complete before invalidate returns so that any host changes
	// are visible and to ensure the host doesn't overwrite subsequent guest changes.
	// TODO(b/136733358): Support returning fences from transfers
	waitcmd.handle = mapping->vma->handle;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
		return -errno;
	}

	return 0;
}

static int virgl_bo_flush(struct bo *bo, struct mapping *mapping)
{
	int ret;
	size_t i;
	struct drm_virtgpu_3d_transfer_to_host xfer = { 0 };
	struct drm_virtgpu_3d_wait waitcmd = { 0 };
	struct virtio_transfers_params xfer_params;
	struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;

	if (!params[param_3d].value)
		return 0;

	if (!(mapping->vma->map_flags & BO_MAP_WRITE))
		return 0;

	if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
		return 0;

	xfer.bo_handle = mapping->vma->handle;

	if (mapping->rect.x || mapping->rect.y) {
		/*
		 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
		 * images
		 */
		if (bo->meta.num_planes == 1) {
			xfer.offset =
			    (bo->meta.strides[0] * mapping->rect.y) +
			    drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
		}
	}

	// Unfortunately, the kernel doesn't actually pass the guest layer_stride and
	// guest stride to the host (compare virgl.h and virtgpu_drm.h). We can use
	// the level to work around this.
	if (priv->host_gbm_enabled)
		xfer.level = bo->meta.strides[0];

	if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
		xfer_params.xfers_needed = 1;
		xfer_params.xfer_boxes[0] = mapping->rect;
	} else {
		assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
								    bo->meta.use_flags));

		virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
	}

	for (i = 0; i < xfer_params.xfers_needed; i++) {
		xfer.box.x = xfer_params.xfer_boxes[i].x;
		xfer.box.y = xfer_params.xfer_boxes[i].y;
		xfer.box.w = xfer_params.xfer_boxes[i].width;
		xfer.box.h = xfer_params.xfer_boxes[i].height;
		xfer.box.d = 1;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
		if (ret) {
			drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST failed with %s\n",
				strerror(errno));
			return -errno;
		}
	}

	// If the buffer is only accessed by the host GPU, then the flush is ordered
	// with subsequent commands. However, if other host hardware can access the
	// buffer, we need to wait for the transfer to complete for consistency.
	// TODO(b/136733358): Support returning fences from transfers
	if (bo->meta.use_flags & BO_USE_NON_GPU_HW) {
		waitcmd.handle = mapping->vma->handle;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
		if (ret) {
			drv_log("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
			return -errno;
		}
	}

	return 0;
}

static uint32_t virgl_resolve_format(uint32_t format, uint64_t use_flags)
{
	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* Camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
			return DRM_FORMAT_NV12;
		/* HACK: See b/28671744 */
		return DRM_FORMAT_XBGR8888;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		/*
		 * All of our host drivers prefer NV12 as their flexible media format.
		 * If that changes, this will need to be modified.
		 */
		if (params[param_3d].value)
			return DRM_FORMAT_NV12;
		else
			return DRM_FORMAT_YVU420_ANDROID;
	default:
		return format;
	}
}

static uint64_t virgl_resolve_use_flags(uint32_t format, uint64_t use_flags)
{
	if (format == DRM_FORMAT_YVU420_ANDROID) {
		use_flags &= ~BO_USE_SCANOUT;
		/*
		 * HACK: See b/172389166. This is for HAL_PIXEL_FORMAT_YV12 buffers allocated by
		 * arcvm. None of our platforms can display YV12, so we can treat it as a SW
		 * buffer. Remove once this can be intelligently resolved in the guest. Also see
		 * gbm_bo_create.
		 */
		use_flags |= BO_USE_LINEAR;
		return use_flags;
	}

	if (!params[param_3d].value && format != DRM_FORMAT_XRGB8888)
		return use_flags & ~BO_USE_SCANOUT;

	return use_flags;
}

static int virgl_resource_info(struct bo *bo, uint32_t strides[DRV_MAX_PLANES],
			       uint32_t offsets[DRV_MAX_PLANES], uint64_t *format_modifier)
{
	int ret;
	struct drm_virtgpu_resource_info_cros res_info = { 0 };

	if (!params[param_3d].value)
		return 0;

	res_info.bo_handle = bo->handles[0].u32;
	res_info.type = VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS, &res_info);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_INFO failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < DRV_MAX_PLANES; plane++) {
		/*
		 * Currently, kernel v4.14 (Betty) doesn't have the extended resource info
		 * ioctl.
		 */
		if (!res_info.strides[plane])
			break;

		strides[plane] = res_info.strides[plane];
		offsets[plane] = res_info.offsets[plane];
	}
	*format_modifier = res_info.format_modifier;

	return 0;
}

const struct backend virtgpu_virgl = { .name = "virtgpu_virgl",
				       .init = virgl_init,
				       .close = virgl_close,
				       .bo_create = virgl_bo_create,
				       .bo_destroy = virgl_bo_destroy,
				       .bo_import = drv_prime_bo_import,
				       .bo_map = virgl_bo_map,
				       .bo_unmap = drv_bo_munmap,
				       .bo_invalidate = virgl_bo_invalidate,
				       .bo_flush = virgl_bo_flush,
				       .resolve_format = virgl_resolve_format,
				       .resolve_use_flags = virgl_resolve_use_flags,
				       .resource_info = virgl_resource_info };