/*
 * Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include <assert.h>
#include <errno.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "external/virgl_hw.h"
#include "external/virgl_protocol.h"
#include "external/virtgpu_drm.h"
#include "helpers.h"
#include "util.h"

#ifndef PAGE_SIZE
#define PAGE_SIZE 0x1000
#endif
#define PIPE_TEXTURE_2D 2

#define MESA_LLVMPIPE_TILE_ORDER 6
#define MESA_LLVMPIPE_TILE_SIZE (1 << MESA_LLVMPIPE_TILE_ORDER)

struct feature {
	uint64_t feature;
	const char *name;
	uint32_t enabled;
};

enum feature_id {
	feat_3d,
	feat_capset_fix,
	feat_resource_blob,
	feat_host_visible,
	feat_host_cross_device,
	feat_max,
};

#define FEATURE(x)                                                                                 \
	(struct feature)                                                                           \
	{                                                                                          \
		x, #x, 0                                                                           \
	}

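/* Indexed by enum feature_id, so the entries below must stay in the same order as the enum. */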
static struct feature features[] = {
	FEATURE(VIRTGPU_PARAM_3D_FEATURES),   FEATURE(VIRTGPU_PARAM_CAPSET_QUERY_FIX),
	FEATURE(VIRTGPU_PARAM_RESOURCE_BLOB), FEATURE(VIRTGPU_PARAM_HOST_VISIBLE),
	FEATURE(VIRTGPU_PARAM_CROSS_DEVICE),
};

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
						   DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
						   DRM_FORMAT_XRGB8888 };

static const uint32_t dumb_texture_source_formats[] = {
	DRM_FORMAT_R8,   DRM_FORMAT_R16,  DRM_FORMAT_YVU420,
	DRM_FORMAT_NV12, DRM_FORMAT_NV21, DRM_FORMAT_YVU420_ANDROID
};

static const uint32_t texture_source_formats[] = { DRM_FORMAT_NV12, DRM_FORMAT_NV21,
						    DRM_FORMAT_R8, DRM_FORMAT_R16,
						    DRM_FORMAT_RG88, DRM_FORMAT_YVU420_ANDROID };

struct virtio_gpu_priv {
	int caps_is_v2;
	union virgl_caps caps;
	int host_gbm_enabled;
	atomic_int next_blob_id;
};

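/* Maps a DRM fourcc to the corresponding virgl format, or returns 0 if there is no mapping. */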
static uint32_t translate_format(uint32_t drm_fourcc)
{
	switch (drm_fourcc) {
	case DRM_FORMAT_BGR888:
	case DRM_FORMAT_RGB888:
		return VIRGL_FORMAT_R8G8B8_UNORM;
	case DRM_FORMAT_XRGB8888:
		return VIRGL_FORMAT_B8G8R8X8_UNORM;
	case DRM_FORMAT_ARGB8888:
		return VIRGL_FORMAT_B8G8R8A8_UNORM;
	case DRM_FORMAT_XBGR8888:
		return VIRGL_FORMAT_R8G8B8X8_UNORM;
	case DRM_FORMAT_ABGR8888:
		return VIRGL_FORMAT_R8G8B8A8_UNORM;
	case DRM_FORMAT_ABGR16161616F:
		return VIRGL_FORMAT_R16G16B16A16_FLOAT;
	case DRM_FORMAT_RGB565:
		return VIRGL_FORMAT_B5G6R5_UNORM;
	case DRM_FORMAT_R8:
		return VIRGL_FORMAT_R8_UNORM;
	case DRM_FORMAT_RG88:
		return VIRGL_FORMAT_R8G8_UNORM;
	case DRM_FORMAT_NV12:
		return VIRGL_FORMAT_NV12;
	case DRM_FORMAT_NV21:
		return VIRGL_FORMAT_NV21;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		return VIRGL_FORMAT_YV12;
	default:
		return 0;
	}
}

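/* Checks the per-format bit in a virgl capability bitmask for the given DRM format. */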
static bool virtio_gpu_bitmask_supports_format(struct virgl_supported_format_mask *supported,
					       uint32_t drm_format)
{
	uint32_t virgl_format = translate_format(drm_format);
	if (!virgl_format)
		return false;

	uint32_t bitmask_index = virgl_format / 32;
	uint32_t bit_index = virgl_format % 32;
	return supported->bitmask[bitmask_index] & (1 << bit_index);
}

// The metadata generated here for emulated buffers is slightly different from the metadata
// generated by drv_bo_from_format. In order to simplify transfers in the flush and invalidate
// functions below, the emulated buffers are oversized. For example, ignoring stride alignment
// requirements for simplicity, a 6x6 YUV420 image buffer might have the following layout from
// drv_bo_from_format:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U | U | U | U |
// | U | U | U | V | V | V |
// | V | V | V | V | V | V |
//
// where each plane immediately follows the previous plane in memory. This layout makes it
// difficult to compute the transfers needed when, for example, the middle 2x2 region of the
// image is locked and needs to be flushed/invalidated.
//
// Emulated multi-plane buffers instead have a layout of:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
//
// where each plane is placed as a sub-image (albeit with a very large stride) in order to
// simplify transfers into 3 sub-image transfers for the above example.
//
// Additional note: the V-plane is not placed to the right of the U-plane due to some
// observed failures in media framework code which assumes the V-plane is not
// "row-interlaced" with the U-plane.
static void virtio_gpu_get_emulated_metadata(const struct bo *bo, struct bo_metadata *metadata)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	uint32_t original_width = bo->meta.width;
	uint32_t original_height = bo->meta.height;

	metadata->format = DRM_FORMAT_R8;
	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		metadata->num_planes = 2;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = original_width;
		metadata->height = y_plane_height + c_plane_height;

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * y_plane_height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		metadata->num_planes = 3;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = ALIGN(original_width, 32);
		metadata->height = y_plane_height + (2 * c_plane_height);

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * original_height;

		// Cb-plane (half resolution, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		// Cr-plane (half resolution, placed below Cb-plane)
		metadata->strides[2] = metadata->width;
		metadata->offsets[2] = metadata->offsets[1] + metadata->sizes[1];
		metadata->sizes[2] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	default:
		break;
	}
}

struct virtio_transfers_params {
	size_t xfers_needed;
	struct rectangle xfer_boxes[DRV_MAX_PLANES];
};

static void virtio_gpu_get_emulated_transfers_params(const struct bo *bo,
						      const struct rectangle *transfer_box,
						      struct virtio_transfers_params *xfer_params)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	struct bo_metadata emulated_metadata;

	if (transfer_box->x == 0 && transfer_box->y == 0 && transfer_box->width == bo->meta.width &&
	    transfer_box->height == bo->meta.height) {
		virtio_gpu_get_emulated_metadata(bo, &emulated_metadata);

		xfer_params->xfers_needed = 1;
		xfer_params->xfer_boxes[0].x = 0;
		xfer_params->xfer_boxes[0].y = 0;
		xfer_params->xfer_boxes[0].width = emulated_metadata.width;
		xfer_params->xfer_boxes[0].height = emulated_metadata.height;

		return;
	}

	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		xfer_params->xfers_needed = 2;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = transfer_box->width;
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		xfer_params->xfers_needed = 3;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// Cb-plane (half resolution, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		// Cr-plane (half resolution, placed below Cb-plane)
		xfer_params->xfer_boxes[2].x = transfer_box->x;
		xfer_params->xfer_boxes[2].y = transfer_box->y + y_plane_height + c_plane_height;
		xfer_params->xfer_boxes[2].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[2].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	}
}

static bool virtio_gpu_supports_combination_natively(struct driver *drv, uint32_t drm_format,
						     uint64_t use_flags)
{
	struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)drv->priv;

	if (priv->caps.max_version == 0)
		return true;

	if ((use_flags & BO_USE_RENDERING) &&
	    !virtio_gpu_bitmask_supports_format(&priv->caps.v1.render, drm_format))
		return false;

	if ((use_flags & BO_USE_TEXTURE) &&
	    !virtio_gpu_bitmask_supports_format(&priv->caps.v1.sampler, drm_format))
		return false;

	if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
	    !virtio_gpu_bitmask_supports_format(&priv->caps.v2.scanout, drm_format))
		return false;

	return true;
}

// For virtio backends that do not support a format natively (e.g. multi-planar formats are not
// supported in virglrenderer when gbm is unavailable on the host machine), returns whether the
// format and usage combination can be handled as a blob (byte buffer).
static bool virtio_gpu_supports_combination_through_emulation(struct driver *drv,
							      uint32_t drm_format,
							      uint64_t use_flags)
{
	struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)drv->priv;

	// Only enable emulation on non-gbm virtio backends.
	if (priv->host_gbm_enabled)
		return false;

	if (use_flags & (BO_USE_RENDERING | BO_USE_SCANOUT))
		return false;

	if (!virtio_gpu_supports_combination_natively(drv, DRM_FORMAT_R8, use_flags))
		return false;

	return drm_format == DRM_FORMAT_NV12 || drm_format == DRM_FORMAT_NV21 ||
	       drm_format == DRM_FORMAT_YVU420 || drm_format == DRM_FORMAT_YVU420_ANDROID;
}

// Adds the given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virtio_gpu_add_combination(struct driver *drv, uint32_t drm_format,
				       struct format_metadata *metadata, uint64_t use_flags)
{
	struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)drv->priv;

	if (features[feat_3d].enabled && priv->caps.max_version >= 1) {
		if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
		    !virtio_gpu_supports_combination_natively(drv, drm_format, use_flags)) {
			drv_log("Strip scanout on format: %d\n", drm_format);
			use_flags &= ~BO_USE_SCANOUT;
		}

		if (!virtio_gpu_supports_combination_natively(drv, drm_format, use_flags) &&
		    !virtio_gpu_supports_combination_through_emulation(drv, drm_format,
								       use_flags)) {
			drv_log("Skipping unsupported combination format:%d\n", drm_format);
			return;
		}
	}

	drv_add_combination(drv, drm_format, metadata, use_flags);
}

// Adds each given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virtio_gpu_add_combinations(struct driver *drv, const uint32_t *drm_formats,
					uint32_t num_formats, struct format_metadata *metadata,
					uint64_t use_flags)
{
	uint32_t i;

	for (i = 0; i < num_formats; i++)
		virtio_gpu_add_combination(drv, drm_formats[i], metadata, use_flags);
}

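/*
 * Fallback allocation path when virgl 3D is unavailable: allocates a dumb buffer, padding
 * non-R8 dimensions to the Mesa llvmpipe tile size (presumably so the host's software
 * rasterizer can touch whole tiles without overrunning the buffer).
 */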
static int virtio_dumb_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
				 uint64_t use_flags)
{
	if (bo->meta.format != DRM_FORMAT_R8) {
		width = ALIGN(width, MESA_LLVMPIPE_TILE_SIZE);
		height = ALIGN(height, MESA_LLVMPIPE_TILE_SIZE);
	}

	return drv_dumb_bo_create_ex(bo, width, height, format, use_flags, BO_QUIRK_DUMB32BPP);
}

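/* If check_flag is set in *flag, clears it there and accumulates virgl_bind into *bind. */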
static inline void handle_flag(uint64_t *flag, uint64_t check_flag, uint32_t *bind,
			       uint32_t virgl_bind)
{
	if ((*flag) & check_flag) {
		(*flag) &= ~check_flag;
		(*bind) |= virgl_bind;
	}
}

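/*
 * Translates minigbm BO_USE_* flags into the VIRGL_BIND_* flags sent with resource creation.
 * Each recognized use flag is consumed from use_flags; anything left over is logged as
 * unhandled.
 */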
static uint32_t compute_virgl_bind_flags(uint64_t use_flags, uint32_t format)
{
	/* In crosvm, VIRGL_BIND_SHARED means minigbm will allocate, not virglrenderer. */
	uint32_t bind = VIRGL_BIND_SHARED;

	handle_flag(&use_flags, BO_USE_TEXTURE, &bind, VIRGL_BIND_SAMPLER_VIEW);
	handle_flag(&use_flags, BO_USE_RENDERING, &bind, VIRGL_BIND_RENDER_TARGET);
	handle_flag(&use_flags, BO_USE_SCANOUT, &bind, VIRGL_BIND_SCANOUT);
	handle_flag(&use_flags, BO_USE_CURSOR, &bind, VIRGL_BIND_CURSOR);
	handle_flag(&use_flags, BO_USE_LINEAR, &bind, VIRGL_BIND_LINEAR);

	if (use_flags & BO_USE_PROTECTED) {
		handle_flag(&use_flags, BO_USE_PROTECTED, &bind, VIRGL_BIND_MINIGBM_PROTECTED);
	} else {
		// Make sure we don't set both flags, since that could be mistaken for
		// protected. Give OFTEN priority over RARELY.
		if (use_flags & BO_USE_SW_READ_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_READ_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_READ_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_RARELY);
		}
		if (use_flags & BO_USE_SW_WRITE_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_WRITE_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_WRITE_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_RARELY);
		}
	}

	handle_flag(&use_flags, BO_USE_CAMERA_WRITE, &bind, VIRGL_BIND_MINIGBM_CAMERA_WRITE);
	handle_flag(&use_flags, BO_USE_CAMERA_READ, &bind, VIRGL_BIND_MINIGBM_CAMERA_READ);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_DECODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_DECODER);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_ENCODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_ENCODER);

	/*
	 * HACK: This is for HAL_PIXEL_FORMAT_YV12 buffers allocated by arcvm. None of
	 * our platforms can display YV12, so we can treat it as a SW buffer. Remove once
	 * this can be intelligently resolved in the guest. Also see gbm_bo_create.
	 */
	if (format == DRM_FORMAT_YVU420_ANDROID)
		bind |= VIRGL_BIND_LINEAR;

	if (use_flags)
		drv_log("Unhandled bo use flag: %llx\n", (unsigned long long)use_flags);

	return bind;
}

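/*
 * 3D (virgl) allocation path: computes the guest layout (native or emulated) and asks the
 * host to create a matching 2D texture resource via DRM_IOCTL_VIRTGPU_RESOURCE_CREATE.
 */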
static int virtio_virgl_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
				  uint64_t use_flags)
{
	int ret;
	size_t i;
	uint32_t stride;
	struct drm_virtgpu_resource_create res_create = { 0 };
	struct bo_metadata emulated_metadata;

	if (virtio_gpu_supports_combination_natively(bo->drv, format, use_flags)) {
		stride = drv_stride_from_format(format, width, 0);
		drv_bo_from_format(bo, stride, height, format);
	} else {
		assert(
		    virtio_gpu_supports_combination_through_emulation(bo->drv, format, use_flags));

		virtio_gpu_get_emulated_metadata(bo, &emulated_metadata);

		format = emulated_metadata.format;
		width = emulated_metadata.width;
		height = emulated_metadata.height;
		for (i = 0; i < emulated_metadata.num_planes; i++) {
			bo->meta.strides[i] = emulated_metadata.strides[i];
			bo->meta.offsets[i] = emulated_metadata.offsets[i];
			bo->meta.sizes[i] = emulated_metadata.sizes[i];
		}
		bo->meta.total_size = emulated_metadata.total_size;
	}

	/*
	 * Setting the target is intended to ensure this resource gets bound as a 2D
	 * texture in the host renderer's GL state. All of these resource properties are
	 * sent unchanged by the kernel to the host, which in turn sends them unchanged to
	 * virglrenderer. When virglrenderer makes a resource, it will convert the target
	 * enum to the equivalent one in GL and then bind the resource to that target.
	 */

	res_create.target = PIPE_TEXTURE_2D;
	res_create.format = translate_format(format);
	res_create.bind = compute_virgl_bind_flags(use_flags, format);
	res_create.width = width;
	res_create.height = height;

	/* For virgl 3D */
	res_create.depth = 1;
	res_create.array_size = 1;
	res_create.last_level = 0;
	res_create.nr_samples = 0;

	res_create.size = ALIGN(bo->meta.total_size, PAGE_SIZE); // PAGE_SIZE = 0x1000
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = res_create.bo_handle;

	return 0;
}

static void *virtio_virgl_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	struct drm_virtgpu_map gem_map = { 0 };

	gem_map.handle = bo->handles[0].u32;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_MAP, &gem_map);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_MAP failed with %s\n", strerror(errno));
		return MAP_FAILED;
	}

	vma->length = bo->meta.total_size;
	return mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
		    gem_map.offset);
}

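/*
 * Queries the host's virgl capabilities. Capset 2 is requested when the kernel advertises the
 * capset-query fix; otherwise (or if that query fails) the code falls back to the v1 capset.
 */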
static int virtio_gpu_get_caps(struct driver *drv, union virgl_caps *caps, int *caps_is_v2)
{
	int ret;
	struct drm_virtgpu_get_caps cap_args = { 0 };

	*caps_is_v2 = 0;
	cap_args.addr = (unsigned long long)caps;
	if (features[feat_capset_fix].enabled) {
		*caps_is_v2 = 1;
		cap_args.cap_set_id = 2;
		cap_args.size = sizeof(union virgl_caps);
	} else {
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);
	}

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
	if (ret) {
		drv_log("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
		*caps_is_v2 = 0;

		// Fallback to v1
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);

		ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
		if (ret)
			drv_log("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
	}

	return ret;
}

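/*
 * Probes each entry of the features[] table with DRM_IOCTL_VIRTGPU_GETPARAM, then fetches the
 * virgl caps and checks whether the host handles NV12 natively, which is used as a proxy for a
 * gbm-backed host renderer.
 */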
static void virtio_gpu_init_features_and_caps(struct driver *drv)
{
	struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)drv->priv;

	for (uint32_t i = 0; i < ARRAY_SIZE(features); i++) {
		struct drm_virtgpu_getparam params = { 0 };

		params.param = features[i].feature;
		params.value = (uint64_t)(uintptr_t)&features[i].enabled;
		int ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GETPARAM, &params);
		if (ret)
			drv_log("DRM_IOCTL_VIRTGPU_GET_PARAM failed with %s\n", strerror(errno));
	}

	if (features[feat_3d].enabled)
		virtio_gpu_get_caps(drv, &priv->caps, &priv->caps_is_v2);

	// Multi-planar formats are currently only supported in virglrenderer through gbm.
	priv->host_gbm_enabled =
	    features[feat_3d].enabled &&
	    virtio_gpu_supports_combination_natively(drv, DRM_FORMAT_NV12, BO_USE_TEXTURE);
}

static int virtio_gpu_init(struct driver *drv)
{
	struct virtio_gpu_priv *priv;

	priv = calloc(1, sizeof(*priv));
	drv->priv = priv;

	virtio_gpu_init_features_and_caps(drv);

	if (features[feat_3d].enabled) {
		/* This doesn't mean the host can scan out everything; it just means the host
		 * hypervisor can display it. */
		virtio_gpu_add_combinations(drv, render_target_formats,
					    ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
					    BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		virtio_gpu_add_combinations(drv, texture_source_formats,
					    ARRAY_SIZE(texture_source_formats), &LINEAR_METADATA,
					    BO_USE_TEXTURE_MASK);
	} else {
		/* Virtio primary plane only allows this format. */
		virtio_gpu_add_combination(drv, DRM_FORMAT_XRGB8888, &LINEAR_METADATA,
					   BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		/* Virtio cursor plane only allows this format, and Chrome cannot live without an
		 * ARGB8888 renderable format. */
		virtio_gpu_add_combination(drv, DRM_FORMAT_ARGB8888, &LINEAR_METADATA,
					   BO_USE_RENDER_MASK | BO_USE_CURSOR);
		/* Android needs more formats, but they cannot be bound as scanouts anymore after
		 * "drm/virtio: fix DRM_FORMAT_* handling" */
		virtio_gpu_add_combinations(drv, render_target_formats,
					    ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
					    BO_USE_RENDER_MASK);
		virtio_gpu_add_combinations(drv, dumb_texture_source_formats,
					    ARRAY_SIZE(dumb_texture_source_formats),
					    &LINEAR_METADATA, BO_USE_TEXTURE_MASK);
		virtio_gpu_add_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
					   BO_USE_SW_MASK | BO_USE_LINEAR);
		virtio_gpu_add_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA,
					   BO_USE_SW_MASK | BO_USE_LINEAR);
	}

	/* Android CTS tests require this. */
	virtio_gpu_add_combination(drv, DRM_FORMAT_RGB888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virtio_gpu_add_combination(drv, DRM_FORMAT_BGR888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virtio_gpu_add_combination(drv, DRM_FORMAT_ABGR16161616F, &LINEAR_METADATA,
				   BO_USE_SW_MASK | BO_USE_TEXTURE_MASK);

	drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
				   BO_USE_HW_VIDEO_ENCODER);
	drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
				   BO_USE_HW_VIDEO_ENCODER);

	if (!priv->host_gbm_enabled) {
		drv_modify_combination(drv, DRM_FORMAT_ABGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_R16, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420_ANDROID, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	return drv_modify_linear_combinations(drv);
}

static void virtio_gpu_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

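/*
 * Blob allocation path: encodes a VIRGL_CCMD_PIPE_RESOURCE_CREATE command describing the 2D
 * resource and submits it with DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, matching the two via a
 * per-driver blob id. The blob flags are stashed in bo->meta.tiling so the flush/invalidate
 * paths can skip transfers for host-mappable blobs.
 */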
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 682 | static int virtio_gpu_bo_create_blob(struct driver *drv, struct bo *bo) |
| 683 | { |
| 684 | int ret; |
| 685 | uint32_t stride; |
David Stevens | 0fe561f | 2020-10-28 16:06:38 +0900 | [diff] [blame] | 686 | uint32_t cur_blob_id; |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 687 | uint32_t cmd[VIRGL_PIPE_RES_CREATE_SIZE + 1] = { 0 }; |
| 688 | struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 }; |
David Stevens | 0fe561f | 2020-10-28 16:06:38 +0900 | [diff] [blame] | 689 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)drv->priv; |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 690 | |
David Stevens | d3f07bd | 2020-09-25 18:52:26 +0900 | [diff] [blame] | 691 | uint32_t blob_flags = VIRTGPU_BLOB_FLAG_USE_SHAREABLE; |
| 692 | if (bo->meta.use_flags & BO_USE_SW_MASK) |
| 693 | blob_flags |= VIRTGPU_BLOB_FLAG_USE_MAPPABLE; |
| 694 | if (bo->meta.use_flags & BO_USE_NON_GPU_HW) |
David Stevens | b42624c | 2020-09-10 10:50:26 +0900 | [diff] [blame] | 695 | blob_flags |= VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE; |
David Stevens | b42624c | 2020-09-10 10:50:26 +0900 | [diff] [blame] | 696 | |
David Stevens | 0fe561f | 2020-10-28 16:06:38 +0900 | [diff] [blame] | 697 | cur_blob_id = atomic_fetch_add(&priv->next_blob_id, 1); |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 698 | stride = drv_stride_from_format(bo->meta.format, bo->meta.width, 0); |
| 699 | drv_bo_from_format(bo, stride, bo->meta.height, bo->meta.format); |
| 700 | bo->meta.total_size = ALIGN(bo->meta.total_size, PAGE_SIZE); |
David Stevens | b42624c | 2020-09-10 10:50:26 +0900 | [diff] [blame] | 701 | bo->meta.tiling = blob_flags; |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 702 | |
| 703 | cmd[0] = VIRGL_CMD0(VIRGL_CCMD_PIPE_RESOURCE_CREATE, 0, VIRGL_PIPE_RES_CREATE_SIZE); |
| 704 | cmd[VIRGL_PIPE_RES_CREATE_TARGET] = PIPE_TEXTURE_2D; |
| 705 | cmd[VIRGL_PIPE_RES_CREATE_WIDTH] = bo->meta.width; |
| 706 | cmd[VIRGL_PIPE_RES_CREATE_HEIGHT] = bo->meta.height; |
| 707 | cmd[VIRGL_PIPE_RES_CREATE_FORMAT] = translate_format(bo->meta.format); |
David Stevens | cf28048 | 2020-12-21 11:43:44 +0900 | [diff] [blame] | 708 | cmd[VIRGL_PIPE_RES_CREATE_BIND] = |
| 709 | compute_virgl_bind_flags(bo->meta.use_flags, bo->meta.format); |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 710 | cmd[VIRGL_PIPE_RES_CREATE_DEPTH] = 1; |
David Stevens | 0fe561f | 2020-10-28 16:06:38 +0900 | [diff] [blame] | 711 | cmd[VIRGL_PIPE_RES_CREATE_BLOB_ID] = cur_blob_id; |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 712 | |
| 713 | drm_rc_blob.cmd = (uint64_t)&cmd; |
| 714 | drm_rc_blob.cmd_size = 4 * (VIRGL_PIPE_RES_CREATE_SIZE + 1); |
| 715 | drm_rc_blob.size = bo->meta.total_size; |
| 716 | drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST3D; |
David Stevens | b42624c | 2020-09-10 10:50:26 +0900 | [diff] [blame] | 717 | drm_rc_blob.blob_flags = blob_flags; |
David Stevens | 0fe561f | 2020-10-28 16:06:38 +0900 | [diff] [blame] | 718 | drm_rc_blob.blob_id = cur_blob_id; |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 719 | |
| 720 | ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob); |
| 721 | if (ret < 0) { |
| 722 | drv_log("DRM_VIRTGPU_RESOURCE_CREATE_BLOB failed with %s\n", strerror(errno)); |
| 723 | return -errno; |
| 724 | } |
| 725 | |
| 726 | for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++) |
| 727 | bo->handles[plane].u32 = drm_rc_blob.bo_handle; |
| 728 | |
| 729 | return 0; |
| 730 | } |
| 731 | |
| 732 | static bool should_use_blob(struct driver *drv, uint32_t format, uint64_t use_flags) |
| 733 | { |
| 734 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)drv->priv; |
| 735 | |
| 736 | // TODO(gurchetansingh): remove once all minigbm users are blob-safe |
| 737 | #ifndef VIRTIO_GPU_NEXT |
| 738 | return false; |
| 739 | #endif |
| 740 | |
| 741 | // Only use blob when host gbm is available |
| 742 | if (!priv->host_gbm_enabled) |
| 743 | return false; |
| 744 | |
David Stevens | d3f07bd | 2020-09-25 18:52:26 +0900 | [diff] [blame] | 745 | // Use regular resources if only the GPU needs efficient access |
| 746 | if (!(use_flags & |
| 747 | (BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN | BO_USE_LINEAR | BO_USE_NON_GPU_HW))) |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 748 | return false; |
| 749 | |
David Stevens | d3f07bd | 2020-09-25 18:52:26 +0900 | [diff] [blame] | 750 | switch (format) { |
| 751 | case DRM_FORMAT_YVU420_ANDROID: |
| 752 | case DRM_FORMAT_R8: |
| 753 | // Formats with strictly defined strides are supported |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 754 | return true; |
David Stevens | d3f07bd | 2020-09-25 18:52:26 +0900 | [diff] [blame] | 755 | case DRM_FORMAT_NV12: |
| 756 | // Knowing buffer metadata at buffer creation isn't yet supported, so buffers |
| 757 | // can't be properly mapped into the guest. |
| 758 | return (use_flags & BO_USE_SW_MASK) == 0; |
| 759 | default: |
| 760 | return false; |
| 761 | } |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 762 | } |
| 763 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 764 | static int virtio_gpu_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format, |
| 765 | uint64_t use_flags) |
| 766 | { |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 767 | if (features[feat_resource_blob].enabled && features[feat_host_visible].enabled && |
| 768 | should_use_blob(bo->drv, format, use_flags)) |
| 769 | return virtio_gpu_bo_create_blob(bo->drv, bo); |
| 770 | |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 771 | if (features[feat_3d].enabled) |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 772 | return virtio_virgl_bo_create(bo, width, height, format, use_flags); |
| 773 | else |
| 774 | return virtio_dumb_bo_create(bo, width, height, format, use_flags); |
| 775 | } |
| 776 | |
| 777 | static int virtio_gpu_bo_destroy(struct bo *bo) |
| 778 | { |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 779 | if (features[feat_3d].enabled) |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 780 | return drv_gem_bo_destroy(bo); |
| 781 | else |
| 782 | return drv_dumb_bo_destroy(bo); |
| 783 | } |
| 784 | |
| 785 | static void *virtio_gpu_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags) |
| 786 | { |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 787 | if (features[feat_3d].enabled) |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 788 | return virtio_virgl_bo_map(bo, vma, plane, map_flags); |
| 789 | else |
| 790 | return drv_dumb_bo_map(bo, vma, plane, map_flags); |
| 791 | } |
| 792 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 793 | static int virtio_gpu_bo_invalidate(struct bo *bo, struct mapping *mapping) |
| 794 | { |
| 795 | int ret; |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 796 | size_t i; |
Gurchetan Singh | 9964438 | 2020-10-07 15:28:11 -0700 | [diff] [blame] | 797 | struct drm_virtgpu_3d_transfer_from_host xfer = { 0 }; |
| 798 | struct drm_virtgpu_3d_wait waitcmd = { 0 }; |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 799 | struct virtio_transfers_params xfer_params; |
| 800 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
David Stevens | 9fe8c20 | 2020-12-21 18:47:55 +0900 | [diff] [blame] | 801 | uint64_t host_write_flags; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 802 | |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 803 | if (!features[feat_3d].enabled) |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 804 | return 0; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 805 | |
David Stevens | 9fe8c20 | 2020-12-21 18:47:55 +0900 | [diff] [blame] | 806 | // Invalidate is only necessary if the host writes to the buffer. The encoder and |
| 807 | // decoder flags don't differentiate between input and output buffers, but we can |
| 808 | // use the format to determine whether this buffer could be encoder/decoder output. |
| 809 | host_write_flags = BO_USE_RENDERING | BO_USE_CAMERA_WRITE; |
Gurchetan Singh | cadc54f | 2021-02-01 12:03:11 -0800 | [diff] [blame] | 810 | if (bo->meta.format == DRM_FORMAT_R8) |
David Stevens | 9fe8c20 | 2020-12-21 18:47:55 +0900 | [diff] [blame] | 811 | host_write_flags |= BO_USE_HW_VIDEO_ENCODER; |
Gurchetan Singh | cadc54f | 2021-02-01 12:03:11 -0800 | [diff] [blame] | 812 | else |
David Stevens | 9fe8c20 | 2020-12-21 18:47:55 +0900 | [diff] [blame] | 813 | host_write_flags |= BO_USE_HW_VIDEO_DECODER; |
Gurchetan Singh | cadc54f | 2021-02-01 12:03:11 -0800 | [diff] [blame] | 814 | |
David Stevens | 9fe8c20 | 2020-12-21 18:47:55 +0900 | [diff] [blame] | 815 | if ((bo->meta.use_flags & host_write_flags) == 0) |
David Stevens | 4d5358d | 2019-10-24 14:59:31 +0900 | [diff] [blame] | 816 | return 0; |
| 817 | |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 818 | if (features[feat_resource_blob].enabled && |
| 819 | (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE)) |
| 820 | return 0; |
| 821 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 822 | xfer.bo_handle = mapping->vma->handle; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 823 | |
Gurchetan Singh | 1b57fe2 | 2020-05-05 09:18:22 -0700 | [diff] [blame] | 824 | if (mapping->rect.x || mapping->rect.y) { |
Gurchetan Singh | 1b57fe2 | 2020-05-05 09:18:22 -0700 | [diff] [blame] | 825 | /* |
| 826 | * virglrenderer uses the box parameters and assumes that offset == 0 for planar |
| 827 | * images |
| 828 | */ |
| 829 | if (bo->meta.num_planes == 1) { |
| 830 | xfer.offset = |
| 831 | (bo->meta.strides[0] * mapping->rect.y) + |
| 832 | drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x; |
| 833 | } |
| 834 | } |
| 835 | |
David Stevens | baab6c8 | 2020-02-26 17:14:43 +0900 | [diff] [blame] | 836 | if ((bo->meta.use_flags & BO_USE_RENDERING) == 0) { |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 837 | // Unfortunately, the kernel doesn't actually pass the guest layer_stride |
| 838 | // and guest stride to the host (compare virtio_gpu.h and virtgpu_drm.h). |
| 839 | // For gbm based resources, we can work around this by using the level field |
| 840 | // to pass the stride to virglrenderer's gbm transfer code. However, we need |
| 841 | // to avoid doing this for resources which don't rely on that transfer code, |
| 842 | // which is resources with the BO_USE_RENDERING flag set. |
David Stevens | baab6c8 | 2020-02-26 17:14:43 +0900 | [diff] [blame] | 843 | // TODO(b/145993887): Send also stride when the patches are landed |
Gurchetan Singh | cadc54f | 2021-02-01 12:03:11 -0800 | [diff] [blame] | 844 | if (priv->host_gbm_enabled) |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 845 | xfer.level = bo->meta.strides[0]; |
David Stevens | baab6c8 | 2020-02-26 17:14:43 +0900 | [diff] [blame] | 846 | } |
Gurchetan Singh | 05e67cc | 2019-06-28 17:21:40 -0700 | [diff] [blame] | 847 | |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 848 | if (virtio_gpu_supports_combination_natively(bo->drv, bo->meta.format, |
| 849 | bo->meta.use_flags)) { |
| 850 | xfer_params.xfers_needed = 1; |
| 851 | xfer_params.xfer_boxes[0] = mapping->rect; |
| 852 | } else { |
| 853 | assert(virtio_gpu_supports_combination_through_emulation(bo->drv, bo->meta.format, |
| 854 | bo->meta.use_flags)); |
| 855 | |
| 856 | virtio_gpu_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params); |
| 857 | } |
| 858 | |
| 859 | for (i = 0; i < xfer_params.xfers_needed; i++) { |
| 860 | xfer.box.x = xfer_params.xfer_boxes[i].x; |
| 861 | xfer.box.y = xfer_params.xfer_boxes[i].y; |
| 862 | xfer.box.w = xfer_params.xfer_boxes[i].width; |
| 863 | xfer.box.h = xfer_params.xfer_boxes[i].height; |
| 864 | xfer.box.d = 1; |
| 865 | |
| 866 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST, &xfer); |
| 867 | if (ret) { |
| 868 | drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST failed with %s\n", |
| 869 | strerror(errno)); |
| 870 | return -errno; |
| 871 | } |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 872 | } |
| 873 | |
David Stevens | 4d5358d | 2019-10-24 14:59:31 +0900 | [diff] [blame] | 874 | // The transfer needs to complete before invalidate returns so that any host changes |
| 875 | // are visible and to ensure the host doesn't overwrite subsequent guest changes. |
| 876 | // TODO(b/136733358): Support returning fences from transfers |
David Stevens | 4d5358d | 2019-10-24 14:59:31 +0900 | [diff] [blame] | 877 | waitcmd.handle = mapping->vma->handle; |
| 878 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd); |
| 879 | if (ret) { |
| 880 | drv_log("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno)); |
| 881 | return -errno; |
| 882 | } |
| 883 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 884 | return 0; |
| 885 | } |
| 886 | |
| 887 | static int virtio_gpu_bo_flush(struct bo *bo, struct mapping *mapping) |
| 888 | { |
| 889 | int ret; |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 890 | size_t i; |
Gurchetan Singh | 9964438 | 2020-10-07 15:28:11 -0700 | [diff] [blame] | 891 | struct drm_virtgpu_3d_transfer_to_host xfer = { 0 }; |
| 892 | struct drm_virtgpu_3d_wait waitcmd = { 0 }; |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 893 | struct virtio_transfers_params xfer_params; |
| 894 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 895 | |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 896 | if (!features[feat_3d].enabled) |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 897 | return 0; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 898 | |
| 899 | if (!(mapping->vma->map_flags & BO_MAP_WRITE)) |
| 900 | return 0; |
| 901 | |
Gurchetan Singh | 0ee06fb | 2019-09-13 17:49:20 -0700 | [diff] [blame] | 902 | if (features[feat_resource_blob].enabled && |
| 903 | (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE)) |
| 904 | return 0; |
| 905 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 906 | xfer.bo_handle = mapping->vma->handle; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 907 | |
Gurchetan Singh | 1b57fe2 | 2020-05-05 09:18:22 -0700 | [diff] [blame] | 908 | if (mapping->rect.x || mapping->rect.y) { |
Gurchetan Singh | 1b57fe2 | 2020-05-05 09:18:22 -0700 | [diff] [blame] | 909 | /*
| 910 |  * virglrenderer uses the box parameters and assumes that offset == 0 for planar
| 911 |  * images, so only apply the rectangle offset for single-plane formats.
| 912 |  */
| 913 | if (bo->meta.num_planes == 1) { |
| 914 | xfer.offset = |
| 915 | (bo->meta.strides[0] * mapping->rect.y) + |
| 916 | drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x; |
| 917 | } |
| 918 | } |
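| | /*
| |  * Illustrative arithmetic with hypothetical values (not taken from this
| |  * code): a single-plane XRGB8888 buffer with strides[0] == 4096 mapped at
| |  * rect (x, y) == (16, 8) gives
| |  *   xfer.offset = 4096 * 8 + 4 * 16 = 32832 bytes.
| |  */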
| 919 | |
Gurchetan Singh | 05e67cc | 2019-06-28 17:21:40 -0700 | [diff] [blame] | 920 | // Unfortunately, the kernel doesn't actually pass the guest layer_stride and
| 921 | // guest stride to the host (compare virtio_gpu.h and virtgpu_drm.h). We repurpose
| 922 | // the level field to carry the guest stride as a workaround.
Gurchetan Singh | cadc54f | 2021-02-01 12:03:11 -0800 | [diff] [blame] | 923 | if (priv->host_gbm_enabled) |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 924 | xfer.level = bo->meta.strides[0]; |
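| | /* Hypothetical example: a guest stride of 5120 bytes is sent as
| |  * xfer.level = 5120, which the host-side GBM path presumably reads back as
| |  * the stride rather than as a mipmap level. */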
Gurchetan Singh | 05e67cc | 2019-06-28 17:21:40 -0700 | [diff] [blame] | 925 | |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 926 | if (virtio_gpu_supports_combination_natively(bo->drv, bo->meta.format, |
| 927 | bo->meta.use_flags)) { |
| 928 | xfer_params.xfers_needed = 1; |
| 929 | xfer_params.xfer_boxes[0] = mapping->rect; |
| 930 | } else { |
| 931 | assert(virtio_gpu_supports_combination_through_emulation(bo->drv, bo->meta.format, |
| 932 | bo->meta.use_flags)); |
| 933 | |
| 934 | virtio_gpu_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params); |
| 935 | } |
| 936 | |
| 937 | for (i = 0; i < xfer_params.xfers_needed; i++) { |
| 938 | xfer.box.x = xfer_params.xfer_boxes[i].x; |
| 939 | xfer.box.y = xfer_params.xfer_boxes[i].y; |
| 940 | xfer.box.w = xfer_params.xfer_boxes[i].width; |
| 941 | xfer.box.h = xfer_params.xfer_boxes[i].height; |
| 942 | xfer.box.d = 1; |
| 943 | |
| 944 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer); |
| 945 | if (ret) { |
| 946 | drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST failed with %s\n", |
| 947 | strerror(errno)); |
| 948 | return -errno; |
| 949 | } |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 950 | } |
| 951 | |
David Stevens | baab6c8 | 2020-02-26 17:14:43 +0900 | [diff] [blame] | 952 | // If the buffer is only accessed by the host GPU, then the flush is ordered |
| 953 | // with subsequent commands. However, if other host hardware can access the |
| 954 | // buffer, we need to wait for the transfer to complete for consistency. |
| 955 | // TODO(b/136733358): Support returning fences from transfers |
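| | // For example, a buffer also touched by non-GPU host hardware (e.g. scanout,
| | // camera or video blocks, whichever BO_USE_NON_GPU_HW covers) takes the wait
| | // below, while a pure GPU render target relies on command ordering instead.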
| 956 | if (bo->meta.use_flags & BO_USE_NON_GPU_HW) { |
David Stevens | baab6c8 | 2020-02-26 17:14:43 +0900 | [diff] [blame] | 957 | waitcmd.handle = mapping->vma->handle; |
| 958 | |
| 959 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd); |
| 960 | if (ret) { |
| 961 | drv_log("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno)); |
| 962 | return -errno; |
| 963 | } |
| 964 | } |
| 965 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 966 | return 0; |
| 967 | } |
| 968 | |
Gurchetan Singh | 0d44d48 | 2019-06-04 19:39:51 -0700 | [diff] [blame] | 969 | static uint32_t virtio_gpu_resolve_format(struct driver *drv, uint32_t format, uint64_t use_flags) |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 970 | { |
| 971 | switch (format) { |
| 972 | case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED: |
Keiichi Watanabe | a13dda7 | 2018-08-02 22:45:05 +0900 | [diff] [blame] | 973 | /* Camera subsystem requires NV12. */ |
| 974 | if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)) |
| 975 | return DRM_FORMAT_NV12; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 976 | /* HACK: See b/28671744 */
| 977 | return DRM_FORMAT_XBGR8888; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 978 | case DRM_FORMAT_FLEX_YCbCr_420_888: |
Gurchetan Singh | f5d280d | 2019-06-04 19:43:41 -0700 | [diff] [blame] | 979 | /* |
| 980 | * All of our host drivers prefer NV12 as their flexible media format. |
| 981 | * If that changes, this will need to be modified. |
| 982 | */ |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 983 | if (features[feat_3d].enabled) |
Gurchetan Singh | f5d280d | 2019-06-04 19:43:41 -0700 | [diff] [blame] | 984 | return DRM_FORMAT_NV12; |
| 985 | else |
Jason Macnak | 1de7f66 | 2020-01-24 15:05:57 -0800 | [diff] [blame] | 986 | return DRM_FORMAT_YVU420_ANDROID; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 987 | default: |
| 988 | return format; |
| 989 | } |
| 990 | } |
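| | /*
| |  * Examples of the mapping above, read straight from the switch:
| |  *  - FLEX_IMPLEMENTATION_DEFINED + camera use flags -> DRM_FORMAT_NV12
| |  *  - FLEX_IMPLEMENTATION_DEFINED otherwise          -> DRM_FORMAT_XBGR8888
| |  *  - FLEX_YCbCr_420_888 with feat_3d enabled        -> DRM_FORMAT_NV12
| |  *  - FLEX_YCbCr_420_888 without feat_3d             -> DRM_FORMAT_YVU420_ANDROID
| |  *  - any other format is returned unchanged
| |  */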
| 991 | |
Gurchetan Singh | bc4f023 | 2019-06-27 20:05:54 -0700 | [diff] [blame] | 992 | static int virtio_gpu_resource_info(struct bo *bo, uint32_t strides[DRV_MAX_PLANES], |
| 993 | uint32_t offsets[DRV_MAX_PLANES]) |
| 994 | { |
| 995 | int ret; |
Chia-I Wu | 2e41f63 | 2021-01-11 11:08:21 -0800 | [diff] [blame] | 996 | struct drm_virtgpu_resource_info_cros res_info = { 0 }; |
Gurchetan Singh | bc4f023 | 2019-06-27 20:05:54 -0700 | [diff] [blame] | 997 | |
Gurchetan Singh | d708f61 | 2019-09-12 17:26:45 -0700 | [diff] [blame] | 998 | if (!features[feat_3d].enabled) |
Gurchetan Singh | bc4f023 | 2019-06-27 20:05:54 -0700 | [diff] [blame] | 999 | return 0; |
| 1000 | |
Gurchetan Singh | bc4f023 | 2019-06-27 20:05:54 -0700 | [diff] [blame] | 1001 | res_info.bo_handle = bo->handles[0].u32; |
Chia-I Wu | 5085562 | 2021-01-12 12:38:09 -0800 | [diff] [blame] | 1002 | res_info.type = VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED; |
Chia-I Wu | 2e41f63 | 2021-01-11 11:08:21 -0800 | [diff] [blame] | 1003 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS, &res_info); |
Gurchetan Singh | bc4f023 | 2019-06-27 20:05:54 -0700 | [diff] [blame] | 1004 | if (ret) { |
| 1005 | drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS failed with %s\n", strerror(errno));
| 1006 | return -errno;
| 1007 | } |
| 1008 | |
| 1009 | for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++) { |
| 1010 | /*
| 1011 |  * Kernel v4.14 (Betty) doesn't implement the extended resource info ioctl,
| 1012 |  * so only override the guest-computed layout when the host returned a stride.
| 1013 |  */
| 1014 | if (res_info.strides[plane]) { |
| 1015 | strides[plane] = res_info.strides[plane]; |
| 1016 | offsets[plane] = res_info.offsets[plane]; |
| 1017 | } |
| 1018 | } |
| 1019 | |
| 1020 | return 0; |
| 1021 | } |
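| | /*
| |  * Illustrative note (assumption, not stated here): bo_create fills in default
| |  * guest-computed strides/offsets, and this hook lets a host-allocated layout
| |  * (e.g. one with extra stride alignment) override them; on older kernels the
| |  * returned strides[] stay zero and the guest defaults are kept.
| |  */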
| 1022 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 1023 | const struct backend backend_virtio_gpu = { |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 1024 | .name = "virtio_gpu", |
| 1025 | .init = virtio_gpu_init, |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 1026 | .close = virtio_gpu_close, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 1027 | .bo_create = virtio_gpu_bo_create, |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 1028 | .bo_destroy = virtio_gpu_bo_destroy, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 1029 | .bo_import = drv_prime_bo_import, |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 1030 | .bo_map = virtio_gpu_bo_map, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 1031 | .bo_unmap = drv_bo_munmap, |
| 1032 | .bo_invalidate = virtio_gpu_bo_invalidate, |
| 1033 | .bo_flush = virtio_gpu_bo_flush, |
| 1034 | .resolve_format = virtio_gpu_resolve_format, |
Gurchetan Singh | bc4f023 | 2019-06-27 20:05:54 -0700 | [diff] [blame] | 1035 | .resource_info = virtio_gpu_resource_info, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 1036 | }; |