Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2017 The Chromium OS Authors. All rights reserved. |
| 3 | * Use of this source code is governed by a BSD-style license that can be |
| 4 | * found in the LICENSE file. |
| 5 | */ |
| 6 | |
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <virtgpu_drm.h>
#include <xf86drm.h>

#include "drv_priv.h"
#include "helpers.h"
#include "util.h"
#include "virgl_hw.h"
| 19 | |
Tao Wu | 3381588 | 2018-03-12 18:07:43 -0700 | [diff] [blame] | 20 | #ifndef PAGE_SIZE |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 21 | #define PAGE_SIZE 0x1000 |
Tao Wu | 3381588 | 2018-03-12 18:07:43 -0700 | [diff] [blame] | 22 | #endif |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 23 | #define PIPE_TEXTURE_2D 2 |
| 24 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 25 | #define MESA_LLVMPIPE_TILE_ORDER 6 |
| 26 | #define MESA_LLVMPIPE_TILE_SIZE (1 << MESA_LLVMPIPE_TILE_ORDER) |
| 27 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 28 | static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888, |
Gurchetan Singh | 71bc665 | 2018-09-17 17:42:05 -0700 | [diff] [blame] | 29 | DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888, |
| 30 | DRM_FORMAT_XRGB8888 }; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 31 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 32 | static const uint32_t dumb_texture_source_formats[] = { DRM_FORMAT_R8, DRM_FORMAT_YVU420, |
Gurchetan Singh | 2efa6b9 | 2019-06-05 17:07:56 -0700 | [diff] [blame] | 33 | DRM_FORMAT_YVU420_ANDROID }; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 34 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 35 | static const uint32_t texture_source_formats[] = { DRM_FORMAT_R8, DRM_FORMAT_RG88 }; |
| 36 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 37 | struct virtio_gpu_priv { |
| 38 | int has_3d; |
| 39 | }; |
| 40 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 41 | static uint32_t translate_format(uint32_t drm_fourcc, uint32_t plane) |
| 42 | { |
| 43 | switch (drm_fourcc) { |
| 44 | case DRM_FORMAT_XRGB8888: |
| 45 | return VIRGL_FORMAT_B8G8R8X8_UNORM; |
| 46 | case DRM_FORMAT_ARGB8888: |
| 47 | return VIRGL_FORMAT_B8G8R8A8_UNORM; |
| 48 | case DRM_FORMAT_XBGR8888: |
| 49 | return VIRGL_FORMAT_R8G8B8X8_UNORM; |
| 50 | case DRM_FORMAT_ABGR8888: |
| 51 | return VIRGL_FORMAT_R8G8B8A8_UNORM; |
| 52 | case DRM_FORMAT_RGB565: |
| 53 | return VIRGL_FORMAT_B5G6R5_UNORM; |
| 54 | case DRM_FORMAT_R8: |
| 55 | return VIRGL_FORMAT_R8_UNORM; |
| 56 | case DRM_FORMAT_RG88: |
| 57 | return VIRGL_FORMAT_R8G8_UNORM; |
| 58 | default: |
| 59 | return 0; |
| 60 | } |
| 61 | } |
| 62 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 63 | static int virtio_dumb_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format, |
| 64 | uint64_t use_flags) |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 65 | { |
Keiichi Watanabe | a13dda7 | 2018-08-02 22:45:05 +0900 | [diff] [blame] | 66 | if (bo->format != DRM_FORMAT_R8) { |
| 67 | width = ALIGN(width, MESA_LLVMPIPE_TILE_SIZE); |
| 68 | height = ALIGN(height, MESA_LLVMPIPE_TILE_SIZE); |
| 69 | } |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 70 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 71 | return drv_dumb_bo_create(bo, width, height, format, use_flags); |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 72 | } |
| 73 | |
Lepton Wu | dbab083 | 2019-04-19 12:26:39 -0700 | [diff] [blame] | 74 | static inline void handle_flag(uint64_t *flag, uint64_t check_flag, uint32_t *bind, |
| 75 | uint32_t virgl_bind) |
| 76 | { |
| 77 | if ((*flag) & check_flag) { |
| 78 | (*flag) &= ~check_flag; |
| 79 | (*bind) |= virgl_bind; |
| 80 | } |
| 81 | } |
| 82 | |
| 83 | static uint32_t use_flags_to_bind(uint64_t use_flags) |
| 84 | { |
| 85 | uint32_t bind = 0; |
| 86 | |
| 87 | handle_flag(&use_flags, BO_USE_TEXTURE, &bind, VIRGL_BIND_SAMPLER_VIEW); |
| 88 | handle_flag(&use_flags, BO_USE_RENDERING, &bind, VIRGL_BIND_RENDER_TARGET); |
| 89 | handle_flag(&use_flags, BO_USE_SCANOUT, &bind, VIRGL_BIND_SCANOUT); |
| 90 | // TODO (b/12983436): handle other use flags. |
| 91 | if (use_flags) { |
| 92 | drv_log("Unhandled bo use flag: %llx\n", (unsigned long long)use_flags); |
| 93 | } |
| 94 | return bind; |
| 95 | } |
| 96 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 97 | static int virtio_virgl_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format, |
| 98 | uint64_t use_flags) |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 99 | { |
| 100 | int ret; |
| 101 | ssize_t plane; |
| 102 | ssize_t num_planes = drv_num_planes_from_format(format); |
| 103 | uint32_t stride0; |
Lepton Wu | dbab083 | 2019-04-19 12:26:39 -0700 | [diff] [blame] | 104 | uint32_t bind = use_flags_to_bind(use_flags); |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 105 | |
| 106 | for (plane = 0; plane < num_planes; plane++) { |
| 107 | uint32_t stride = drv_stride_from_format(format, width, plane); |
| 108 | uint32_t size = drv_size_from_format(format, stride, height, plane); |
| 109 | uint32_t res_format = translate_format(format, plane); |
| 110 | struct drm_virtgpu_resource_create res_create; |
| 111 | |
| 112 | memset(&res_create, 0, sizeof(res_create)); |
| 113 | size = ALIGN(size, PAGE_SIZE); |
| 114 | /* |
| 115 | * Setting the target is intended to ensure this resource gets bound as a 2D |
| 116 | * texture in the host renderer's GL state. All of these resource properties are |
| 117 | * sent unchanged by the kernel to the host, which in turn sends them unchanged to |
| 118 | * virglrenderer. When virglrenderer makes a resource, it will convert the target |
| 119 | * enum to the equivalent one in GL and then bind the resource to that target. |
| 120 | */ |
| 121 | res_create.target = PIPE_TEXTURE_2D; |
| 122 | res_create.format = res_format; |
Lepton Wu | dbab083 | 2019-04-19 12:26:39 -0700 | [diff] [blame] | 123 | res_create.bind = bind; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 124 | res_create.width = width; |
| 125 | res_create.height = height; |
| 126 | res_create.depth = 1; |
| 127 | res_create.array_size = 1; |
| 128 | res_create.last_level = 0; |
| 129 | res_create.nr_samples = 0; |
| 130 | res_create.stride = stride; |
| 131 | res_create.size = size; |
| 132 | |
| 133 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create); |
| 134 | if (ret) { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 135 | drv_log("DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s\n", |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 136 | strerror(errno)); |
Stéphane Marchesin | 6ac299f | 2019-03-21 12:23:29 -0700 | [diff] [blame] | 137 | ret = -errno; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 138 | goto fail; |
| 139 | } |
| 140 | |
| 141 | bo->handles[plane].u32 = res_create.bo_handle; |
| 142 | } |
| 143 | |
| 144 | stride0 = drv_stride_from_format(format, width, 0); |
| 145 | drv_bo_from_format(bo, stride0, height, format); |
| 146 | |
| 147 | for (plane = 0; plane < num_planes; plane++) |
| 148 | bo->offsets[plane] = 0; |
| 149 | |
| 150 | return 0; |
| 151 | |
| 152 | fail: |
| 153 | for (plane--; plane >= 0; plane--) { |
| 154 | struct drm_gem_close gem_close; |
| 155 | memset(&gem_close, 0, sizeof(gem_close)); |
| 156 | gem_close.handle = bo->handles[plane].u32; |
| 157 | drmIoctl(bo->drv->fd, DRM_IOCTL_GEM_CLOSE, &gem_close); |
| 158 | } |
| 159 | |
| 160 | return ret; |
| 161 | } |
| 162 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 163 | static void *virtio_virgl_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags) |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 164 | { |
| 165 | int ret; |
| 166 | struct drm_virtgpu_map gem_map; |
| 167 | |
| 168 | memset(&gem_map, 0, sizeof(gem_map)); |
| 169 | gem_map.handle = bo->handles[0].u32; |
| 170 | |
| 171 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_MAP, &gem_map); |
| 172 | if (ret) { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 173 | drv_log("DRM_IOCTL_VIRTGPU_MAP failed with %s\n", strerror(errno)); |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 174 | return MAP_FAILED; |
| 175 | } |
| 176 | |
Tao Wu | 3381588 | 2018-03-12 18:07:43 -0700 | [diff] [blame] | 177 | vma->length = bo->total_size; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 178 | return mmap(0, bo->total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd, |
| 179 | gem_map.offset); |
| 180 | } |
| 181 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 182 | static int virtio_gpu_init(struct driver *drv) |
| 183 | { |
| 184 | int ret; |
| 185 | struct virtio_gpu_priv *priv; |
| 186 | struct drm_virtgpu_getparam args; |
| 187 | |
| 188 | priv = calloc(1, sizeof(*priv)); |
| 189 | drv->priv = priv; |
| 190 | |
| 191 | memset(&args, 0, sizeof(args)); |
| 192 | args.param = VIRTGPU_PARAM_3D_FEATURES; |
| 193 | args.value = (uint64_t)(uintptr_t)&priv->has_3d; |
| 194 | ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GETPARAM, &args); |
| 195 | if (ret) { |
| 196 | drv_log("virtio 3D acceleration is not available\n"); |
| 197 | /* Be paranoid */ |
| 198 | priv->has_3d = 0; |
| 199 | } |
| 200 | |
Lepton Wu | dbab083 | 2019-04-19 12:26:39 -0700 | [diff] [blame] | 201 | /* This doesn't mean host can scanout everything, it just means host |
| 202 | * hypervisor can show it. */ |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 203 | drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats), |
Lepton Wu | dbab083 | 2019-04-19 12:26:39 -0700 | [diff] [blame] | 204 | &LINEAR_METADATA, BO_USE_RENDER_MASK | BO_USE_SCANOUT); |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 205 | |
Gurchetan Singh | 2efa6b9 | 2019-06-05 17:07:56 -0700 | [diff] [blame] | 206 | if (priv->has_3d) { |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 207 | drv_add_combinations(drv, texture_source_formats, |
| 208 | ARRAY_SIZE(texture_source_formats), &LINEAR_METADATA, |
| 209 | BO_USE_TEXTURE_MASK); |
Gurchetan Singh | 2efa6b9 | 2019-06-05 17:07:56 -0700 | [diff] [blame] | 210 | } else { |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 211 | drv_add_combinations(drv, dumb_texture_source_formats, |
| 212 | ARRAY_SIZE(dumb_texture_source_formats), &LINEAR_METADATA, |
| 213 | BO_USE_TEXTURE_MASK); |
Gurchetan Singh | 2efa6b9 | 2019-06-05 17:07:56 -0700 | [diff] [blame] | 214 | drv_add_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA, |
| 215 | BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE); |
| 216 | } |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 217 | |
Gurchetan Singh | 71bc665 | 2018-09-17 17:42:05 -0700 | [diff] [blame] | 218 | /* Android CTS tests require this. */ |
| 219 | drv_add_combination(drv, DRM_FORMAT_BGR888, &LINEAR_METADATA, BO_USE_SW_MASK); |
| 220 | |
Keiichi Watanabe | a13dda7 | 2018-08-02 22:45:05 +0900 | [diff] [blame] | 221 | drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA, |
| 222 | BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE); |
| 223 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 224 | return drv_modify_linear_combinations(drv); |
| 225 | } |
| 226 | |
| 227 | static void virtio_gpu_close(struct driver *drv) |
| 228 | { |
| 229 | free(drv->priv); |
| 230 | drv->priv = NULL; |
| 231 | } |
| 232 | |
| 233 | static int virtio_gpu_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format, |
| 234 | uint64_t use_flags) |
| 235 | { |
| 236 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
| 237 | if (priv->has_3d) |
| 238 | return virtio_virgl_bo_create(bo, width, height, format, use_flags); |
| 239 | else |
| 240 | return virtio_dumb_bo_create(bo, width, height, format, use_flags); |
| 241 | } |
| 242 | |
| 243 | static int virtio_gpu_bo_destroy(struct bo *bo) |
| 244 | { |
| 245 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
| 246 | if (priv->has_3d) |
| 247 | return drv_gem_bo_destroy(bo); |
| 248 | else |
| 249 | return drv_dumb_bo_destroy(bo); |
| 250 | } |
| 251 | |
| 252 | static void *virtio_gpu_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags) |
| 253 | { |
| 254 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
| 255 | if (priv->has_3d) |
| 256 | return virtio_virgl_bo_map(bo, vma, plane, map_flags); |
| 257 | else |
| 258 | return drv_dumb_bo_map(bo, vma, plane, map_flags); |
| 259 | } |
| 260 | |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 261 | static int virtio_gpu_bo_invalidate(struct bo *bo, struct mapping *mapping) |
| 262 | { |
| 263 | int ret; |
| 264 | struct drm_virtgpu_3d_transfer_from_host xfer; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 265 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
| 266 | |
| 267 | if (!priv->has_3d) |
| 268 | return 0; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 269 | |
| 270 | memset(&xfer, 0, sizeof(xfer)); |
| 271 | xfer.bo_handle = mapping->vma->handle; |
| 272 | xfer.box.x = mapping->rect.x; |
| 273 | xfer.box.y = mapping->rect.y; |
| 274 | xfer.box.w = mapping->rect.width; |
| 275 | xfer.box.h = mapping->rect.height; |
| 276 | xfer.box.d = 1; |
| 277 | |
| 278 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST, &xfer); |
| 279 | if (ret) { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 280 | drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST failed with %s\n", strerror(errno)); |
Stéphane Marchesin | 6ac299f | 2019-03-21 12:23:29 -0700 | [diff] [blame] | 281 | return -errno; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 282 | } |
| 283 | |
| 284 | return 0; |
| 285 | } |
| 286 | |
| 287 | static int virtio_gpu_bo_flush(struct bo *bo, struct mapping *mapping) |
| 288 | { |
| 289 | int ret; |
| 290 | struct drm_virtgpu_3d_transfer_to_host xfer; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 291 | struct virtio_gpu_priv *priv = (struct virtio_gpu_priv *)bo->drv->priv; |
| 292 | |
| 293 | if (!priv->has_3d) |
| 294 | return 0; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 295 | |
| 296 | if (!(mapping->vma->map_flags & BO_MAP_WRITE)) |
| 297 | return 0; |
| 298 | |
| 299 | memset(&xfer, 0, sizeof(xfer)); |
| 300 | xfer.bo_handle = mapping->vma->handle; |
| 301 | xfer.box.x = mapping->rect.x; |
| 302 | xfer.box.y = mapping->rect.y; |
| 303 | xfer.box.w = mapping->rect.width; |
| 304 | xfer.box.h = mapping->rect.height; |
| 305 | xfer.box.d = 1; |
| 306 | |
| 307 | ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer); |
| 308 | if (ret) { |
Alistair Strachan | 0cfaaa5 | 2018-03-19 14:03:23 -0700 | [diff] [blame] | 309 | drv_log("DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST failed with %s\n", strerror(errno)); |
Stéphane Marchesin | 6ac299f | 2019-03-21 12:23:29 -0700 | [diff] [blame] | 310 | return -errno; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 311 | } |
| 312 | |
| 313 | return 0; |
| 314 | } |
| 315 | |
Gurchetan Singh | 0d44d48 | 2019-06-04 19:39:51 -0700 | [diff] [blame^] | 316 | static uint32_t virtio_gpu_resolve_format(struct driver *drv, uint32_t format, uint64_t use_flags) |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 317 | { |
| 318 | switch (format) { |
| 319 | case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED: |
Keiichi Watanabe | a13dda7 | 2018-08-02 22:45:05 +0900 | [diff] [blame] | 320 | /* Camera subsystem requires NV12. */ |
| 321 | if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)) |
| 322 | return DRM_FORMAT_NV12; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 323 | /*HACK: See b/28671744 */ |
| 324 | return DRM_FORMAT_XBGR8888; |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 325 | case DRM_FORMAT_FLEX_YCbCr_420_888: |
| 326 | return DRM_FORMAT_YVU420; |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 327 | default: |
| 328 | return format; |
| 329 | } |
| 330 | } |
| 331 | |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 332 | const struct backend backend_virtio_gpu = { |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 333 | .name = "virtio_gpu", |
| 334 | .init = virtio_gpu_init, |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 335 | .close = virtio_gpu_close, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 336 | .bo_create = virtio_gpu_bo_create, |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 337 | .bo_destroy = virtio_gpu_bo_destroy, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 338 | .bo_import = drv_prime_bo_import, |
Lepton Wu | 249e863 | 2018-04-05 12:50:03 -0700 | [diff] [blame] | 339 | .bo_map = virtio_gpu_bo_map, |
Zach Reizner | 85c4c5f | 2017-10-04 13:15:57 -0700 | [diff] [blame] | 340 | .bo_unmap = drv_bo_munmap, |
| 341 | .bo_invalidate = virtio_gpu_bo_invalidate, |
| 342 | .bo_flush = virtio_gpu_bo_flush, |
| 343 | .resolve_format = virtio_gpu_resolve_format, |
| 344 | }; |