/*
 * Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include <assert.h>
#include <errno.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stdlib.h> /* calloc()/free() are used by virgl_init()/virgl_close() below. */
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "external/virgl_hw.h"
#include "external/virgl_protocol.h"
#include "external/virtgpu_drm.h"
#include "helpers.h"
#include "util.h"
#include "virtgpu.h"

#define PIPE_TEXTURE_2D 2

#define MESA_LLVMPIPE_MAX_TEXTURE_2D_LEVELS 15
#define MESA_LLVMPIPE_MAX_TEXTURE_2D_SIZE (1 << (MESA_LLVMPIPE_MAX_TEXTURE_2D_LEVELS - 1))
#define MESA_LLVMPIPE_TILE_ORDER 6
#define MESA_LLVMPIPE_TILE_SIZE (1 << MESA_LLVMPIPE_TILE_ORDER)

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
						  DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
						  DRM_FORMAT_XRGB8888 };

static const uint32_t dumb_texture_source_formats[] = {
	DRM_FORMAT_R8,		DRM_FORMAT_R16,	 DRM_FORMAT_YVU420,
	DRM_FORMAT_NV12,	DRM_FORMAT_NV21, DRM_FORMAT_YVU420_ANDROID,
	DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

static const uint32_t texture_source_formats[] = {
	DRM_FORMAT_NV21,	   DRM_FORMAT_R8,	   DRM_FORMAT_R16, DRM_FORMAT_RG88,
	DRM_FORMAT_YVU420_ANDROID, DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

extern struct virtgpu_param params[];

struct virgl_priv {
	int caps_is_v2;
	union virgl_caps caps;
	int host_gbm_enabled;
	atomic_int next_blob_id;
};

static uint32_t translate_format(uint32_t drm_fourcc)
{
	switch (drm_fourcc) {
	case DRM_FORMAT_BGR888:
	case DRM_FORMAT_RGB888:
		return VIRGL_FORMAT_R8G8B8_UNORM;
	case DRM_FORMAT_XRGB8888:
		return VIRGL_FORMAT_B8G8R8X8_UNORM;
	case DRM_FORMAT_ARGB8888:
		return VIRGL_FORMAT_B8G8R8A8_UNORM;
	case DRM_FORMAT_XBGR8888:
		return VIRGL_FORMAT_R8G8B8X8_UNORM;
	case DRM_FORMAT_ABGR8888:
		return VIRGL_FORMAT_R8G8B8A8_UNORM;
	case DRM_FORMAT_ABGR16161616F:
		return VIRGL_FORMAT_R16G16B16A16_FLOAT;
	case DRM_FORMAT_ABGR2101010:
		return VIRGL_FORMAT_R10G10B10A2_UNORM;
	case DRM_FORMAT_RGB565:
		return VIRGL_FORMAT_B5G6R5_UNORM;
	case DRM_FORMAT_R8:
		return VIRGL_FORMAT_R8_UNORM;
	case DRM_FORMAT_R16:
		return VIRGL_FORMAT_R16_UNORM;
	case DRM_FORMAT_RG88:
		return VIRGL_FORMAT_R8G8_UNORM;
	case DRM_FORMAT_NV12:
		return VIRGL_FORMAT_NV12;
	case DRM_FORMAT_NV21:
		return VIRGL_FORMAT_NV21;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		return VIRGL_FORMAT_YV12;
	default:
		drv_log("Unhandled format: %d\n", drm_fourcc);
		return 0;
	}
}

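/*
 * Checks whether |drm_format| is set in a virgl format bitmask. A sketch of the
 * indexing (the enum value here is illustrative; the real ones live in
 * external/virgl_hw.h): if translate_format() returns 67, the format is
 * supported iff bit (67 % 32) == 3 is set in supported->bitmask[67 / 32],
 * i.e. bitmask[2].
 */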
static bool virgl_bitmask_supports_format(struct virgl_supported_format_mask *supported,
					  uint32_t drm_format)
{
	uint32_t virgl_format = translate_format(drm_format);
	if (!virgl_format)
		return false;

	uint32_t bitmask_index = virgl_format / 32;
	uint32_t bit_index = virgl_format % 32;
	return supported->bitmask[bitmask_index] & (1 << bit_index);
}

// The metadata generated here for emulated buffers is slightly different from the metadata
// generated by drv_bo_from_format. In order to simplify transfers in the flush and invalidate
// functions below, the emulated buffers are oversized. For example, ignoring stride alignment
// requirements to demonstrate, a 6x6 YUV420 image buffer might have the following layout from
// drv_bo_from_format:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U | U | U | U |
// | U | U | U | V | V | V |
// | V | V | V | V | V | V |
//
// where each plane immediately follows the previous plane in memory. This layout makes it
// difficult to compute the transfers needed, for example, when the middle 2x2 region of the
// image is locked and needs to be flushed/invalidated.
//
// Emulated multi-plane buffers instead have a layout of:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
//
// where each plane is placed as a sub-image (albeit with a very large stride) in order to
// simplify transfers into 3 sub-image transfers for the above example.
//
// Additional note: the V-plane is not placed to the right of the U-plane due to some
// observed failures in media framework code which assumes the V-plane is not
// "row-interlaced" with the U-plane.
static void virgl_get_emulated_metadata(const struct bo *bo, struct bo_metadata *metadata)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	uint32_t original_width = bo->meta.width;
	uint32_t original_height = bo->meta.height;

	metadata->format = DRM_FORMAT_R8;
	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		metadata->num_planes = 2;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = original_width;
		metadata->height = y_plane_height + c_plane_height;

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * y_plane_height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		metadata->num_planes = 3;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = ALIGN(original_width, 32);
		metadata->height = y_plane_height + (2 * c_plane_height);

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * original_height;

		// Cb-plane (half resolution, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		// Cr-plane (half resolution, placed below Cb-plane)
		metadata->strides[2] = metadata->width;
		metadata->offsets[2] = metadata->offsets[1] + metadata->sizes[1];
		metadata->sizes[2] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	default:
		break;
	}
}

struct virtio_transfers_params {
	size_t xfers_needed;
	struct rectangle xfer_boxes[DRV_MAX_PLANES];
};

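/*
 * Computes the per-plane transfer boxes for an emulated buffer. A sketch of the
 * math in the NV12 case below: locking the middle 2x2 region of a 6x6
 * DRM_FORMAT_NV12 buffer (transfer_box = { .x = 2, .y = 2, .width = 2, .height = 2 })
 * yields two boxes, a 2x2 box at (2, 2) for the Y-plane and a 2x1 box at
 * (2, 2 + 6) = (2, 8) for the interleaved CbCr-plane.
 */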
static void virgl_get_emulated_transfers_params(const struct bo *bo,
						const struct rectangle *transfer_box,
						struct virtio_transfers_params *xfer_params)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	struct bo_metadata emulated_metadata;

	if (transfer_box->x == 0 && transfer_box->y == 0 && transfer_box->width == bo->meta.width &&
	    transfer_box->height == bo->meta.height) {
		virgl_get_emulated_metadata(bo, &emulated_metadata);

		xfer_params->xfers_needed = 1;
		xfer_params->xfer_boxes[0].x = 0;
		xfer_params->xfer_boxes[0].y = 0;
		xfer_params->xfer_boxes[0].width = emulated_metadata.width;
		xfer_params->xfer_boxes[0].height = emulated_metadata.height;

		return;
	}

	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		xfer_params->xfers_needed = 2;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = transfer_box->width;
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		xfer_params->xfers_needed = 3;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// Cb-plane (half resolution, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		// Cr-plane (half resolution, placed below Cb-plane)
		xfer_params->xfer_boxes[2].x = transfer_box->x;
		xfer_params->xfer_boxes[2].y = transfer_box->y + y_plane_height + c_plane_height;
		xfer_params->xfer_boxes[2].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[2].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	}
}

static bool virgl_supports_combination_natively(struct driver *drv, uint32_t drm_format,
						uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	if (priv->caps.max_version == 0)
		return true;

	if ((use_flags & BO_USE_RENDERING) &&
	    !virgl_bitmask_supports_format(&priv->caps.v1.render, drm_format))
		return false;

	if ((use_flags & BO_USE_TEXTURE) &&
	    !virgl_bitmask_supports_format(&priv->caps.v1.sampler, drm_format))
		return false;

	if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
	    !virgl_bitmask_supports_format(&priv->caps.v2.scanout, drm_format))
		return false;

	return true;
}

// For virtio backends that do not support formats natively (e.g. multi-planar formats are not
// supported in virglrenderer when gbm is unavailable on the host machine), returns whether the
// format and usage combination can be handled as a blob (byte buffer).
static bool virgl_supports_combination_through_emulation(struct driver *drv, uint32_t drm_format,
							 uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	// Only enable emulation on non-gbm virtio backends.
	if (priv->host_gbm_enabled)
		return false;

	if (use_flags & (BO_USE_RENDERING | BO_USE_SCANOUT))
		return false;

	if (!virgl_supports_combination_natively(drv, DRM_FORMAT_R8, use_flags))
		return false;

	return drm_format == DRM_FORMAT_NV12 || drm_format == DRM_FORMAT_NV21 ||
	       drm_format == DRM_FORMAT_YVU420 || drm_format == DRM_FORMAT_YVU420_ANDROID;
}

// Adds the given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combination(struct driver *drv, uint32_t drm_format,
				  struct format_metadata *metadata, uint64_t use_flags)
{
	if (params[param_3d].value) {
		if ((use_flags & BO_USE_SCANOUT) &&
		    !virgl_supports_combination_natively(drv, drm_format, BO_USE_SCANOUT)) {
			drv_log("Stripping scanout on format: %d\n", drm_format);
			use_flags &= ~BO_USE_SCANOUT;
		}

		if (!virgl_supports_combination_natively(drv, drm_format, use_flags) &&
		    !virgl_supports_combination_through_emulation(drv, drm_format, use_flags)) {
			drv_log("Skipping unsupported combination format: %d\n", drm_format);
			return;
		}
	}

	drv_add_combination(drv, drm_format, metadata, use_flags);
}

// Adds each given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combinations(struct driver *drv, const uint32_t *drm_formats,
				   uint32_t num_formats, struct format_metadata *metadata,
				   uint64_t use_flags)
{
	uint32_t i;

	for (i = 0; i < num_formats; i++)
		virgl_add_combination(drv, drm_formats[i], metadata, use_flags);
}

static int virtio_dumb_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
				 uint64_t use_flags)
{
	if (bo->meta.format != DRM_FORMAT_R8) {
		width = ALIGN(width, MESA_LLVMPIPE_TILE_SIZE);
		height = ALIGN(height, MESA_LLVMPIPE_TILE_SIZE);
	}

	return drv_dumb_bo_create_ex(bo, width, height, format, use_flags, BO_QUIRK_DUMB32BPP);
}

static inline void handle_flag(uint64_t *flag, uint64_t check_flag, uint32_t *bind,
			       uint32_t virgl_bind)
{
	if ((*flag) & check_flag) {
		(*flag) &= ~check_flag;
		(*bind) |= virgl_bind;
	}
}

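/*
 * A sketch of the resulting mapping: use_flags == (BO_USE_TEXTURE |
 * BO_USE_SW_READ_OFTEN) resolves to VIRGL_BIND_SHARED | VIRGL_BIND_SAMPLER_VIEW |
 * VIRGL_BIND_MINIGBM_SW_READ_OFTEN, and any use flag left unconsumed by the
 * handle_flag() calls below is logged as unhandled.
 */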
static uint32_t compute_virgl_bind_flags(uint64_t use_flags, uint32_t format)
{
	/* In crosvm, VIRGL_BIND_SHARED means minigbm will allocate, not virglrenderer. */
	uint32_t bind = VIRGL_BIND_SHARED;

	handle_flag(&use_flags, BO_USE_TEXTURE, &bind, VIRGL_BIND_SAMPLER_VIEW);
	handle_flag(&use_flags, BO_USE_RENDERING, &bind, VIRGL_BIND_RENDER_TARGET);
	handle_flag(&use_flags, BO_USE_SCANOUT, &bind, VIRGL_BIND_SCANOUT);
	handle_flag(&use_flags, BO_USE_CURSOR, &bind, VIRGL_BIND_CURSOR);
	handle_flag(&use_flags, BO_USE_LINEAR, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_GPU_DATA_BUFFER, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_FRONT_RENDERING, &bind, VIRGL_BIND_LINEAR);

	if (use_flags & BO_USE_PROTECTED) {
		handle_flag(&use_flags, BO_USE_PROTECTED, &bind, VIRGL_BIND_MINIGBM_PROTECTED);
	} else {
		// Make sure we don't set both flags, since that could be mistaken for
		// protected. Give OFTEN priority over RARELY.
		if (use_flags & BO_USE_SW_READ_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_READ_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_READ_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_RARELY);
		}
		if (use_flags & BO_USE_SW_WRITE_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_WRITE_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_WRITE_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_RARELY);
		}
	}

	handle_flag(&use_flags, BO_USE_CAMERA_WRITE, &bind, VIRGL_BIND_MINIGBM_CAMERA_WRITE);
	handle_flag(&use_flags, BO_USE_CAMERA_READ, &bind, VIRGL_BIND_MINIGBM_CAMERA_READ);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_DECODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_DECODER);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_ENCODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_ENCODER);

	if (use_flags)
		drv_log("Unhandled bo use flag: %llx\n", (unsigned long long)use_flags);

	return bind;
}

static int virgl_3d_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			      uint64_t use_flags)
{
	int ret;
	size_t i;
	uint32_t stride;
	struct drm_virtgpu_resource_create res_create = { 0 };
	struct bo_metadata emulated_metadata;

	if (virgl_supports_combination_natively(bo->drv, format, use_flags)) {
		stride = drv_stride_from_format(format, width, 0);
		drv_bo_from_format(bo, stride, height, format);
	} else {
		assert(virgl_supports_combination_through_emulation(bo->drv, format, use_flags));

		virgl_get_emulated_metadata(bo, &emulated_metadata);

		format = emulated_metadata.format;
		width = emulated_metadata.width;
		height = emulated_metadata.height;
		for (i = 0; i < emulated_metadata.num_planes; i++) {
			bo->meta.strides[i] = emulated_metadata.strides[i];
			bo->meta.offsets[i] = emulated_metadata.offsets[i];
			bo->meta.sizes[i] = emulated_metadata.sizes[i];
		}
		bo->meta.total_size = emulated_metadata.total_size;
	}

	/*
	 * Setting the target is intended to ensure this resource gets bound as a 2D
	 * texture in the host renderer's GL state. All of these resource properties are
	 * sent unchanged by the kernel to the host, which in turn sends them unchanged to
	 * virglrenderer. When virglrenderer makes a resource, it will convert the target
	 * enum to the equivalent one in GL and then bind the resource to that target.
	 */

	res_create.target = PIPE_TEXTURE_2D;
	res_create.format = translate_format(format);
	res_create.bind = compute_virgl_bind_flags(use_flags, format);
	res_create.width = width;
	res_create.height = height;

	/* For virgl 3D */
	res_create.depth = 1;
	res_create.array_size = 1;
	res_create.last_level = 0;
	res_create.nr_samples = 0;

	res_create.size = ALIGN(bo->meta.total_size, PAGE_SIZE); // PAGE_SIZE = 0x1000
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = res_create.bo_handle;

	return 0;
}

static void *virgl_3d_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	struct drm_virtgpu_map gem_map = { 0 };

	gem_map.handle = bo->handles[0].u32;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_MAP, &gem_map);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_MAP failed with %s\n", strerror(errno));
		return MAP_FAILED;
	}

	vma->length = bo->meta.total_size;
	return mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
		    gem_map.offset);
}

static uint32_t virgl_3d_get_max_texture_2d_size(struct driver *drv)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	if (priv->caps.v2.max_texture_2d_size)
		return priv->caps.v2.max_texture_2d_size;

	return UINT32_MAX;
}

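/*
 * Queries the host's virgl capabilities. A sketch of the fallback logic below:
 * when the capset-fix feature is advertised, ask for the v2 capset (cap_set_id 2,
 * sized for the full union virgl_caps); otherwise, or if that ioctl fails, fall
 * back to the v1 capset (cap_set_id 1, sized for struct virgl_caps_v1 only).
 */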
static int virgl_get_caps(struct driver *drv, union virgl_caps *caps, int *caps_is_v2)
{
	int ret;
	struct drm_virtgpu_get_caps cap_args = { 0 };

	*caps_is_v2 = 0;
	cap_args.addr = (unsigned long long)caps;
	if (params[param_capset_fix].value) {
		*caps_is_v2 = 1;
		cap_args.cap_set_id = 2;
		cap_args.size = sizeof(union virgl_caps);
	} else {
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);
	}

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
		*caps_is_v2 = 0;

		// Fallback to v1
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);

		ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
		if (ret)
			drv_log("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
	}

	return ret;
}

static void virgl_init_params_and_caps(struct driver *drv)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;
	if (params[param_3d].value) {
		virgl_get_caps(drv, &priv->caps, &priv->caps_is_v2);

		// We use two criteria to determine whether host minigbm is used on the host for
		// swapchain allocations.
		//
		// (1) Host minigbm is only available via virglrenderer, and only virglrenderer
		// advertises capabilities.
		// (2) Only host minigbm doesn't emulate YUV formats. Checking this is a bit of a
		// proxy, but it works.
		priv->host_gbm_enabled =
		    priv->caps.max_version > 0 &&
		    virgl_supports_combination_natively(drv, DRM_FORMAT_NV12, BO_USE_TEXTURE);
	}
}

static int virgl_init(struct driver *drv)
{
	struct virgl_priv *priv;

	priv = calloc(1, sizeof(*priv));
	if (!priv)
		return -ENOMEM;

	drv->priv = priv;

	virgl_init_params_and_caps(drv);

	if (params[param_3d].value) {
		/* This doesn't mean the host can scan out everything; it just means the host
		 * hypervisor can show it. */
		virgl_add_combinations(drv, render_target_formats,
				       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
				       BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		virgl_add_combinations(drv, texture_source_formats,
				       ARRAY_SIZE(texture_source_formats), &LINEAR_METADATA,
				       BO_USE_TEXTURE_MASK);
		/* NV12 with scanout must flow through virgl_add_combination, so that the native
		 * support is checked and the scanout use_flag can be conditionally stripped. */
		virgl_add_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
				      BO_USE_TEXTURE_MASK | BO_USE_CAMERA_READ |
					  BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
					  BO_USE_HW_VIDEO_ENCODER | BO_USE_SCANOUT);
	} else {
		/* The virtio primary plane only allows this format. */
		virgl_add_combination(drv, DRM_FORMAT_XRGB8888, &LINEAR_METADATA,
				      BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		/* The virtio cursor plane only allows this format, and Chrome cannot live without
		 * an ARGB8888 renderable format. */
		virgl_add_combination(drv, DRM_FORMAT_ARGB8888, &LINEAR_METADATA,
				      BO_USE_RENDER_MASK | BO_USE_CURSOR);
		/* Android needs more, but they cannot be bound as scanouts anymore after
		 * "drm/virtio: fix DRM_FORMAT_* handling" */
		virgl_add_combinations(drv, render_target_formats,
				       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
				       BO_USE_RENDER_MASK);
		virgl_add_combinations(drv, dumb_texture_source_formats,
				       ARRAY_SIZE(dumb_texture_source_formats), &LINEAR_METADATA,
				       BO_USE_TEXTURE_MASK);
		drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	/* Android CTS tests require this. */
	virgl_add_combination(drv, DRM_FORMAT_RGB888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virgl_add_combination(drv, DRM_FORMAT_BGR888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virgl_add_combination(drv, DRM_FORMAT_P010, &LINEAR_METADATA,
			      BO_USE_SW_MASK | BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
	drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
				   BO_USE_HW_VIDEO_ENCODER | BO_USE_GPU_DATA_BUFFER);

	if (!priv->host_gbm_enabled) {
		drv_modify_combination(drv, DRM_FORMAT_ABGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_R16, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420_ANDROID, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	return drv_modify_linear_combinations(drv);
}

static void virgl_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

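/*
 * Allocates a host-backed blob resource. A sketch of the flow below: the guest
 * encodes a VIRGL_CCMD_PIPE_RESOURCE_CREATE command carrying the 2D-texture
 * metadata plus a driver-generated blob id, then submits that command stream and
 * the same blob id through DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB so the host
 * can pair the virgl resource with the blob memory backing it.
 */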
static int virgl_bo_create_blob(struct driver *drv, struct bo *bo)
{
	int ret;
	uint32_t stride;
	uint32_t cur_blob_id;
	uint32_t cmd[VIRGL_PIPE_RES_CREATE_SIZE + 1] = { 0 };
	struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 };
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	uint32_t blob_flags = VIRTGPU_BLOB_FLAG_USE_SHAREABLE;
	if (bo->meta.use_flags & BO_USE_SW_MASK)
		blob_flags |= VIRTGPU_BLOB_FLAG_USE_MAPPABLE;

	// For now, all blob use cases are cross device. When we add wider
	// support for blobs, we can revisit making this unconditional.
	blob_flags |= VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE;

	cur_blob_id = atomic_fetch_add(&priv->next_blob_id, 1);
	stride = drv_stride_from_format(bo->meta.format, bo->meta.width, 0);
	drv_bo_from_format(bo, stride, bo->meta.height, bo->meta.format);
	bo->meta.total_size = ALIGN(bo->meta.total_size, PAGE_SIZE);
	bo->meta.tiling = blob_flags;

	cmd[0] = VIRGL_CMD0(VIRGL_CCMD_PIPE_RESOURCE_CREATE, 0, VIRGL_PIPE_RES_CREATE_SIZE);
	cmd[VIRGL_PIPE_RES_CREATE_TARGET] = PIPE_TEXTURE_2D;
	cmd[VIRGL_PIPE_RES_CREATE_WIDTH] = bo->meta.width;
	cmd[VIRGL_PIPE_RES_CREATE_HEIGHT] = bo->meta.height;
	cmd[VIRGL_PIPE_RES_CREATE_FORMAT] = translate_format(bo->meta.format);
	cmd[VIRGL_PIPE_RES_CREATE_BIND] =
	    compute_virgl_bind_flags(bo->meta.use_flags, bo->meta.format);
	cmd[VIRGL_PIPE_RES_CREATE_DEPTH] = 1;
	cmd[VIRGL_PIPE_RES_CREATE_BLOB_ID] = cur_blob_id;

	drm_rc_blob.cmd = (uint64_t)&cmd;
	drm_rc_blob.cmd_size = 4 * (VIRGL_PIPE_RES_CREATE_SIZE + 1);
	drm_rc_blob.size = bo->meta.total_size;
	drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST3D;
	drm_rc_blob.blob_flags = blob_flags;
	drm_rc_blob.blob_id = cur_blob_id;

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob);
	if (ret < 0) {
		drv_log("DRM_VIRTGPU_RESOURCE_CREATE_BLOB failed with %s\n", strerror(errno));
		return -errno;
	}

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = drm_rc_blob.bo_handle;

	return 0;
}

static bool should_use_blob(struct driver *drv, uint32_t format, uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	// TODO(gurchetansingh): remove once all minigbm users are blob-safe
#ifndef VIRTIO_GPU_NEXT
	return false;
#endif

	// Only use blob when host gbm is available
	if (!priv->host_gbm_enabled)
		return false;

	// Use regular resources if only the GPU needs efficient access. Blob resource is a better
	// fit for BO_USE_GPU_DATA_BUFFER which is mapped to VIRGL_BIND_LINEAR.
	if (!(use_flags & (BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN | BO_USE_LINEAR |
			   BO_USE_NON_GPU_HW | BO_USE_GPU_DATA_BUFFER)))
		return false;

	switch (format) {
	case DRM_FORMAT_R8:
		// Formats with strictly defined strides are supported
		return true;
	case DRM_FORMAT_YVU420_ANDROID:
	case DRM_FORMAT_NV12:
		// Knowing buffer metadata at buffer creation isn't yet supported, so buffers
		// can't be properly mapped into the guest.
		return (use_flags & BO_USE_SW_MASK) == 0;
	default:
		return false;
	}
}

static int virgl_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			   uint64_t use_flags)
{
	if (params[param_resource_blob].value && params[param_host_visible].value &&
	    should_use_blob(bo->drv, format, use_flags))
		return virgl_bo_create_blob(bo->drv, bo);

	if (params[param_3d].value)
		return virgl_3d_bo_create(bo, width, height, format, use_flags);
	else
		return virtio_dumb_bo_create(bo, width, height, format, use_flags);
}

static int virgl_bo_destroy(struct bo *bo)
{
	if (params[param_3d].value)
		return drv_gem_bo_destroy(bo);
	else
		return drv_dumb_bo_destroy(bo);
}

static void *virgl_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	if (params[param_3d].value)
		return virgl_3d_bo_map(bo, vma, plane, map_flags);
	else
		return drv_dumb_bo_map(bo, vma, plane, map_flags);
}

Zach Reizner85c4c5f2017-10-04 13:15:57 -0700777{
778 int ret;
Jason Macnak1de7f662020-01-24 15:05:57 -0800779 size_t i;
Gurchetan Singh99644382020-10-07 15:28:11 -0700780 struct drm_virtgpu_3d_transfer_from_host xfer = { 0 };
781 struct drm_virtgpu_3d_wait waitcmd = { 0 };
Jason Macnak1de7f662020-01-24 15:05:57 -0800782 struct virtio_transfers_params xfer_params;
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800783 struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;
David Stevens9fe8c202020-12-21 18:47:55 +0900784 uint64_t host_write_flags;
Lepton Wu249e8632018-04-05 12:50:03 -0700785
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800786 if (!params[param_3d].value)
Lepton Wu249e8632018-04-05 12:50:03 -0700787 return 0;
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700788
David Stevens9fe8c202020-12-21 18:47:55 +0900789 // Invalidate is only necessary if the host writes to the buffer. The encoder and
790 // decoder flags don't differentiate between input and output buffers, but we can
791 // use the format to determine whether this buffer could be encoder/decoder output.
792 host_write_flags = BO_USE_RENDERING | BO_USE_CAMERA_WRITE;
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800793 if (bo->meta.format == DRM_FORMAT_R8)
David Stevens9fe8c202020-12-21 18:47:55 +0900794 host_write_flags |= BO_USE_HW_VIDEO_ENCODER;
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800795 else
David Stevens9fe8c202020-12-21 18:47:55 +0900796 host_write_flags |= BO_USE_HW_VIDEO_DECODER;
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800797
David Stevens9fe8c202020-12-21 18:47:55 +0900798 if ((bo->meta.use_flags & host_write_flags) == 0)
David Stevens4d5358d2019-10-24 14:59:31 +0900799 return 0;
800
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800801 if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
Gurchetan Singh0ee06fb2019-09-13 17:49:20 -0700802 return 0;
803
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700804 xfer.bo_handle = mapping->vma->handle;
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700805
Gurchetan Singh1b57fe22020-05-05 09:18:22 -0700806 if (mapping->rect.x || mapping->rect.y) {
Gurchetan Singh1b57fe22020-05-05 09:18:22 -0700807 /*
808 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
809 * images
810 */
811 if (bo->meta.num_planes == 1) {
812 xfer.offset =
813 (bo->meta.strides[0] * mapping->rect.y) +
814 drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
815 }
816 }
817
David Stevensbaab6c82020-02-26 17:14:43 +0900818 if ((bo->meta.use_flags & BO_USE_RENDERING) == 0) {
Jason Macnak1de7f662020-01-24 15:05:57 -0800819 // Unfortunately, the kernel doesn't actually pass the guest layer_stride
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800820 // and guest stride to the host (compare virgl.h and virtgpu_drm.h).
Jason Macnak1de7f662020-01-24 15:05:57 -0800821 // For gbm based resources, we can work around this by using the level field
822 // to pass the stride to virglrenderer's gbm transfer code. However, we need
823 // to avoid doing this for resources which don't rely on that transfer code,
824 // which is resources with the BO_USE_RENDERING flag set.
David Stevensbaab6c82020-02-26 17:14:43 +0900825 // TODO(b/145993887): Send also stride when the patches are landed
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800826 if (priv->host_gbm_enabled)
Jason Macnak1de7f662020-01-24 15:05:57 -0800827 xfer.level = bo->meta.strides[0];
David Stevensbaab6c82020-02-26 17:14:43 +0900828 }
Gurchetan Singh05e67cc2019-06-28 17:21:40 -0700829
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800830 if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
Jason Macnak1de7f662020-01-24 15:05:57 -0800831 xfer_params.xfers_needed = 1;
832 xfer_params.xfer_boxes[0] = mapping->rect;
833 } else {
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800834 assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
835 bo->meta.use_flags));
Jason Macnak1de7f662020-01-24 15:05:57 -0800836
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800837 virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
Jason Macnak1de7f662020-01-24 15:05:57 -0800838 }
839
840 for (i = 0; i < xfer_params.xfers_needed; i++) {
841 xfer.box.x = xfer_params.xfer_boxes[i].x;
842 xfer.box.y = xfer_params.xfer_boxes[i].y;
843 xfer.box.w = xfer_params.xfer_boxes[i].width;
844 xfer.box.h = xfer_params.xfer_boxes[i].height;
845 xfer.box.d = 1;
846
847 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST, &xfer);
848 if (ret) {
849 drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST failed with %s\n",
850 strerror(errno));
851 return -errno;
852 }
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700853 }
854
David Stevens4d5358d2019-10-24 14:59:31 +0900855 // The transfer needs to complete before invalidate returns so that any host changes
856 // are visible and to ensure the host doesn't overwrite subsequent guest changes.
857 // TODO(b/136733358): Support returning fences from transfers
David Stevens4d5358d2019-10-24 14:59:31 +0900858 waitcmd.handle = mapping->vma->handle;
859 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
860 if (ret) {
861 drv_log("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
862 return -errno;
863 }
864
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700865 return 0;
866}
867
static int virgl_bo_flush(struct bo *bo, struct mapping *mapping)
{
	int ret;
	size_t i;
	struct drm_virtgpu_3d_transfer_to_host xfer = { 0 };
	struct drm_virtgpu_3d_wait waitcmd = { 0 };
	struct virtio_transfers_params xfer_params;
	struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;

	if (!params[param_3d].value)
		return 0;

	if (!(mapping->vma->map_flags & BO_MAP_WRITE))
		return 0;

	if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
		return 0;

	xfer.bo_handle = mapping->vma->handle;

	if (mapping->rect.x || mapping->rect.y) {
		/*
		 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
		 * images
		 */
		if (bo->meta.num_planes == 1) {
			xfer.offset =
			    (bo->meta.strides[0] * mapping->rect.y) +
			    drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
		}
	}

	// Unfortunately, the kernel doesn't actually pass the guest layer_stride and
	// guest stride to the host (compare virgl.h and virtgpu_drm.h). We can use
	// the level to work around this.
	if (priv->host_gbm_enabled)
		xfer.level = bo->meta.strides[0];

	if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
		xfer_params.xfers_needed = 1;
		xfer_params.xfer_boxes[0] = mapping->rect;
	} else {
		assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
								    bo->meta.use_flags));

		virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
	}

	for (i = 0; i < xfer_params.xfers_needed; i++) {
		xfer.box.x = xfer_params.xfer_boxes[i].x;
		xfer.box.y = xfer_params.xfer_boxes[i].y;
		xfer.box.w = xfer_params.xfer_boxes[i].width;
		xfer.box.h = xfer_params.xfer_boxes[i].height;
		xfer.box.d = 1;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
		if (ret) {
			drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST failed with %s\n",
				strerror(errno));
			return -errno;
		}
	}

	// If the buffer is only accessed by the host GPU, then the flush is ordered
	// with subsequent commands. However, if other host hardware can access the
	// buffer, we need to wait for the transfer to complete for consistency.
	// TODO(b/136733358): Support returning fences from transfers
	if (bo->meta.use_flags & BO_USE_NON_GPU_HW) {
		waitcmd.handle = mapping->vma->handle;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
		if (ret) {
			drv_log("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
			return -errno;
		}
	}

	return 0;
}

static uint32_t virgl_resolve_format(uint32_t format, uint64_t use_flags)
{
	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* Camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
			return DRM_FORMAT_NV12;
		/* HACK: See b/28671744 */
		return DRM_FORMAT_XBGR8888;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		/*
		 * All of our host drivers prefer NV12 as their flexible media format.
		 * If that changes, this will need to be modified.
		 */
		if (params[param_3d].value)
			return DRM_FORMAT_NV12;
		else
			return DRM_FORMAT_YVU420_ANDROID;
	default:
		return format;
	}
}

static uint64_t virgl_resolve_use_flags(struct driver *drv, uint32_t format, uint64_t use_flags)
{
	if (format == DRM_FORMAT_YVU420_ANDROID) {
		use_flags &= ~BO_USE_SCANOUT;
		/*
		 * HACK: See b/172389166. This is for HAL_PIXEL_FORMAT_YV12 buffers allocated by
		 * arcvm. None of our platforms can display YV12, so we can treat it as a SW
		 * buffer. Remove once this can be intelligently resolved in the guest. Also see
		 * gbm_bo_create.
		 */
		use_flags |= BO_USE_LINEAR;
		return use_flags;
	}

	if (params[param_3d].value) {
		switch (format) {
		/* formats that need to support scanout */
		case DRM_FORMAT_ABGR8888:
		case DRM_FORMAT_ARGB8888:
		case DRM_FORMAT_RGB565:
		case DRM_FORMAT_XBGR8888:
		case DRM_FORMAT_XRGB8888:
		case DRM_FORMAT_NV12:
			/* strip the scanout use_flag if necessary */
			if ((use_flags & BO_USE_SCANOUT) &&
			    !virgl_supports_combination_natively(drv, format, BO_USE_SCANOUT))
				return use_flags & ~BO_USE_SCANOUT;
			break;
		default:
			break;
		}
	} else {
		if (format != DRM_FORMAT_XRGB8888)
			return use_flags & ~BO_USE_SCANOUT;
	}

	return use_flags;
}

static int virgl_resource_info(struct bo *bo, uint32_t strides[DRV_MAX_PLANES],
			       uint32_t offsets[DRV_MAX_PLANES], uint64_t *format_modifier)
{
	int ret;
	struct drm_virtgpu_resource_info_cros res_info = { 0 };

	if (!params[param_3d].value)
		return 0;

	res_info.bo_handle = bo->handles[0].u32;
	res_info.type = VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS, &res_info);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_INFO failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < DRV_MAX_PLANES; plane++) {
		/*
		 * Currently, kernel v4.14 (Betty) doesn't have the extended resource info
		 * ioctl.
		 */
		if (!res_info.strides[plane])
			break;

		strides[plane] = res_info.strides[plane];
		offsets[plane] = res_info.offsets[plane];
	}
	*format_modifier = res_info.format_modifier;

	return 0;
}

static uint32_t virgl_get_max_texture_2d_size(struct driver *drv)
{
	if (params[param_3d].value)
		return virgl_3d_get_max_texture_2d_size(drv);
	else
		return MESA_LLVMPIPE_MAX_TEXTURE_2D_SIZE;
}

const struct backend virtgpu_virgl = { .name = "virtgpu_virgl",
				       .init = virgl_init,
				       .close = virgl_close,
				       .bo_create = virgl_bo_create,
				       .bo_destroy = virgl_bo_destroy,
				       .bo_import = drv_prime_bo_import,
				       .bo_map = virgl_bo_map,
				       .bo_unmap = drv_bo_munmap,
				       .bo_invalidate = virgl_bo_invalidate,
				       .bo_flush = virgl_bo_flush,
				       .resolve_format = virgl_resolve_format,
				       .resolve_use_flags = virgl_resolve_use_flags,
				       .resource_info = virgl_resource_info,
				       .get_max_texture_2d_size = virgl_get_max_texture_2d_size };