/*
2 * Copyright (C) 2015 Red Hat, Inc.
3 * All Rights Reserved.
4 *
5 * Authors:
6 * Dave Airlie
7 * Alon Levy
8 *
9 * Permission is hereby granted, free of charge, to any person obtaining a
10 * copy of this software and associated documentation files (the "Software"),
11 * to deal in the Software without restriction, including without limitation
12 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
13 * and/or sell copies of the Software, and to permit persons to whom the
14 * Software is furnished to do so, subject to the following conditions:
15 *
16 * The above copyright notice and this permission notice shall be included in
17 * all copies or substantial portions of the Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
23 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
24 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
25 * OTHER DEALINGS IN THE SOFTWARE.
26 */
27
28#include "virtgpu_drv.h"
29#include <drm/drm_crtc_helper.h>
30#include <drm/drm_atomic_helper.h>
31
32#define XRES_MIN 320
33#define YRES_MIN 200
34
35#define XRES_DEF 1024
36#define YRES_DEF 768
37
38#define XRES_MAX 8192
39#define YRES_MAX 8192
40
/*
 * Gamma LUT programming is not implemented for virtio-gpu; this stub
 * exists because the CRTC registers a gamma size (see vgdev_output_init)
 * and userspace may still issue the gamma ioctl.
 */
static void virtio_gpu_crtc_gamma_set(struct drm_crtc *crtc,
				      u16 *red, u16 *green, u16 *blue,
				      uint32_t start, uint32_t size)
{
	/* TODO */
}
47
48static void
49virtio_gpu_hide_cursor(struct virtio_gpu_device *vgdev,
50 struct virtio_gpu_output *output)
51{
52 output->cursor.hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_UPDATE_CURSOR);
53 output->cursor.resource_id = 0;
54 virtio_gpu_cursor_ping(vgdev, output);
55}
56
57static int virtio_gpu_crtc_cursor_set(struct drm_crtc *crtc,
58 struct drm_file *file_priv,
59 uint32_t handle,
60 uint32_t width,
61 uint32_t height,
62 int32_t hot_x, int32_t hot_y)
63{
64 struct virtio_gpu_device *vgdev = crtc->dev->dev_private;
65 struct virtio_gpu_output *output =
66 container_of(crtc, struct virtio_gpu_output, crtc);
67 struct drm_gem_object *gobj = NULL;
68 struct virtio_gpu_object *qobj = NULL;
69 struct virtio_gpu_fence *fence = NULL;
70 int ret = 0;
71
72 if (handle == 0) {
73 virtio_gpu_hide_cursor(vgdev, output);
74 return 0;
75 }
76
77 /* lookup the cursor */
78 gobj = drm_gem_object_lookup(crtc->dev, file_priv, handle);
79 if (gobj == NULL)
80 return -ENOENT;
81
82 qobj = gem_to_virtio_gpu_obj(gobj);
83
84 if (!qobj->hw_res_handle) {
85 ret = -EINVAL;
86 goto out;
87 }
88
89 virtio_gpu_cmd_transfer_to_host_2d(vgdev, qobj->hw_res_handle, 0,
90 cpu_to_le32(64),
91 cpu_to_le32(64),
92 0, 0, &fence);
Gerd Hoffmann6d415332015-09-15 08:20:46 +020093 ret = virtio_gpu_object_reserve(qobj, false);
94 if (!ret) {
95 reservation_object_add_excl_fence(qobj->tbo.resv,
96 &fence->f);
97 fence_put(&fence->f);
98 virtio_gpu_object_unreserve(qobj);
99 virtio_gpu_object_wait(qobj, false);
100 }
Dave Airliedc5698e2013-09-09 10:02:56 +1000101
102 output->cursor.hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_UPDATE_CURSOR);
103 output->cursor.resource_id = cpu_to_le32(qobj->hw_res_handle);
104 output->cursor.hot_x = cpu_to_le32(hot_x);
105 output->cursor.hot_y = cpu_to_le32(hot_y);
106 virtio_gpu_cursor_ping(vgdev, output);
107 ret = 0;
108
109out:
110 drm_gem_object_unreference_unlocked(gobj);
111 return ret;
112}
113
114static int virtio_gpu_crtc_cursor_move(struct drm_crtc *crtc,
115 int x, int y)
116{
117 struct virtio_gpu_device *vgdev = crtc->dev->dev_private;
118 struct virtio_gpu_output *output =
119 container_of(crtc, struct virtio_gpu_output, crtc);
120
121 output->cursor.hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_MOVE_CURSOR);
122 output->cursor.pos.x = cpu_to_le32(x);
123 output->cursor.pos.y = cpu_to_le32(y);
124 virtio_gpu_cursor_ping(vgdev, output);
125 return 0;
126}
127
/*
 * CRTC vtable.  Cursor handling uses dedicated virtio commands;
 * modesetting and state management go through the atomic helpers.
 * Page flipping stays disabled until vblank support exists.
 */
static const struct drm_crtc_funcs virtio_gpu_crtc_funcs = {
	.cursor_set2 = virtio_gpu_crtc_cursor_set,
	.cursor_move = virtio_gpu_crtc_cursor_move,
	.gamma_set = virtio_gpu_crtc_gamma_set,
	.set_config = drm_atomic_helper_set_config,
	.destroy = drm_crtc_cleanup,

#if 0 /* not (yet) working without vblank support according to docs */
	.page_flip = drm_atomic_helper_page_flip,
#endif
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};
142
143static void virtio_gpu_user_framebuffer_destroy(struct drm_framebuffer *fb)
144{
145 struct virtio_gpu_framebuffer *virtio_gpu_fb
146 = to_virtio_gpu_framebuffer(fb);
147
148 if (virtio_gpu_fb->obj)
149 drm_gem_object_unreference_unlocked(virtio_gpu_fb->obj);
150 drm_framebuffer_cleanup(fb);
151 kfree(virtio_gpu_fb);
152}
153
/*
 * Dirty callback: forward the clip rectangles to the virtio dirty
 * tracking code, which flushes them to the host.
 */
static int
virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb,
				     struct drm_file *file_priv,
				     unsigned flags, unsigned color,
				     struct drm_clip_rect *clips,
				     unsigned num_clips)
{
	return virtio_gpu_surface_dirty(to_virtio_gpu_framebuffer(fb),
					clips, num_clips);
}
166
/* Framebuffer vtable: destroy drops the GEM ref, dirty flushes to host. */
static const struct drm_framebuffer_funcs virtio_gpu_fb_funcs = {
	.destroy = virtio_gpu_user_framebuffer_destroy,
	.dirty = virtio_gpu_framebuffer_surface_dirty,
};
171
172int
173virtio_gpu_framebuffer_init(struct drm_device *dev,
174 struct virtio_gpu_framebuffer *vgfb,
175 struct drm_mode_fb_cmd2 *mode_cmd,
176 struct drm_gem_object *obj)
177{
178 int ret;
179 struct virtio_gpu_object *bo;
180 vgfb->obj = obj;
181
182 bo = gem_to_virtio_gpu_obj(obj);
183
184 ret = drm_framebuffer_init(dev, &vgfb->base, &virtio_gpu_fb_funcs);
185 if (ret) {
186 vgfb->obj = NULL;
187 return ret;
188 }
189 drm_helper_mode_fill_fb_struct(&vgfb->base, mode_cmd);
190
191 spin_lock_init(&vgfb->dirty_lock);
192 vgfb->x1 = vgfb->y1 = INT_MAX;
193 vgfb->x2 = vgfb->y2 = 0;
194 return 0;
195}
196
/* The virtual CRTC accepts any mode unmodified. */
static bool virtio_gpu_crtc_mode_fixup(struct drm_crtc *crtc,
				       const struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
{
	return true;
}
203
204static void virtio_gpu_crtc_mode_set_nofb(struct drm_crtc *crtc)
205{
206 struct drm_device *dev = crtc->dev;
207 struct virtio_gpu_device *vgdev = dev->dev_private;
208 struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);
209
210 virtio_gpu_cmd_set_scanout(vgdev, output->index, 0,
211 crtc->mode.hdisplay,
212 crtc->mode.vdisplay, 0, 0);
213}
214
/* No-op: the virtual CRTC has no power state to program on enable. */
static void virtio_gpu_crtc_enable(struct drm_crtc *crtc)
{
}
218
219static void virtio_gpu_crtc_disable(struct drm_crtc *crtc)
220{
221 struct drm_device *dev = crtc->dev;
222 struct virtio_gpu_device *vgdev = dev->dev_private;
223 struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc);
224
225 virtio_gpu_cmd_set_scanout(vgdev, output->index, 0, 0, 0, 0, 0);
226}
227
/* No driver-specific constraints: every atomic CRTC state is acceptable. */
static int virtio_gpu_crtc_atomic_check(struct drm_crtc *crtc,
					struct drm_crtc_state *state)
{
	return 0;
}
233
/* CRTC helper vtable used by the atomic helper machinery. */
static const struct drm_crtc_helper_funcs virtio_gpu_crtc_helper_funcs = {
	.enable = virtio_gpu_crtc_enable,
	.disable = virtio_gpu_crtc_disable,
	.mode_fixup = virtio_gpu_crtc_mode_fixup,
	.mode_set_nofb = virtio_gpu_crtc_mode_set_nofb,
	.atomic_check = virtio_gpu_crtc_atomic_check,
};
241
/* The virtual encoder accepts any mode unmodified. */
static bool virtio_gpu_enc_mode_fixup(struct drm_encoder *encoder,
				      const struct drm_display_mode *mode,
				      struct drm_display_mode *adjusted_mode)
{
	return true;
}
248
/* No-op: scanout programming happens in the CRTC mode_set_nofb hook. */
static void virtio_gpu_enc_mode_set(struct drm_encoder *encoder,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
}
254
/* No-op: the virtual encoder has no hardware to power up. */
static void virtio_gpu_enc_enable(struct drm_encoder *encoder)
{
}
258
/* No-op: the virtual encoder has no hardware to power down. */
static void virtio_gpu_enc_disable(struct drm_encoder *encoder)
{
}
262
263static int virtio_gpu_conn_get_modes(struct drm_connector *connector)
264{
265 struct virtio_gpu_output *output =
266 drm_connector_to_virtio_gpu_output(connector);
267 struct drm_display_mode *mode = NULL;
268 int count, width, height;
269
270 width = le32_to_cpu(output->info.r.width);
271 height = le32_to_cpu(output->info.r.height);
272 count = drm_add_modes_noedid(connector, XRES_MAX, YRES_MAX);
273
274 if (width == 0 || height == 0) {
275 width = XRES_DEF;
276 height = YRES_DEF;
277 drm_set_preferred_mode(connector, XRES_DEF, YRES_DEF);
278 } else {
279 DRM_DEBUG("add mode: %dx%d\n", width, height);
280 mode = drm_cvt_mode(connector->dev, width, height, 60,
281 false, false, false);
282 mode->type |= DRM_MODE_TYPE_PREFERRED;
283 drm_mode_probed_add(connector, mode);
284 count++;
285 }
286
287 return count;
288}
289
290static int virtio_gpu_conn_mode_valid(struct drm_connector *connector,
291 struct drm_display_mode *mode)
292{
293 struct virtio_gpu_output *output =
294 drm_connector_to_virtio_gpu_output(connector);
295 int width, height;
296
297 width = le32_to_cpu(output->info.r.width);
298 height = le32_to_cpu(output->info.r.height);
299
300 if (!(mode->type & DRM_MODE_TYPE_PREFERRED))
301 return MODE_OK;
302 if (mode->hdisplay == XRES_DEF && mode->vdisplay == YRES_DEF)
303 return MODE_OK;
304 if (mode->hdisplay <= width && mode->hdisplay >= width - 16 &&
305 mode->vdisplay <= height && mode->vdisplay >= height - 16)
306 return MODE_OK;
307
308 DRM_DEBUG("del mode: %dx%d\n", mode->hdisplay, mode->vdisplay);
309 return MODE_BAD;
310}
311
312static struct drm_encoder*
313virtio_gpu_best_encoder(struct drm_connector *connector)
314{
315 struct virtio_gpu_output *virtio_gpu_output =
316 drm_connector_to_virtio_gpu_output(connector);
317
318 return &virtio_gpu_output->enc;
319}
320
/* Encoder helper vtable: all hooks are no-ops for the virtual encoder. */
static const struct drm_encoder_helper_funcs virtio_gpu_enc_helper_funcs = {
	.mode_fixup = virtio_gpu_enc_mode_fixup,
	.mode_set = virtio_gpu_enc_mode_set,
	.enable = virtio_gpu_enc_enable,
	.disable = virtio_gpu_enc_disable,
};
327
/* Connector helper vtable: mode list and validation against host info. */
static const struct drm_connector_helper_funcs virtio_gpu_conn_helper_funcs = {
	.get_modes = virtio_gpu_conn_get_modes,
	.mode_valid = virtio_gpu_conn_mode_valid,
	.best_encoder = virtio_gpu_best_encoder,
};
333
/* Nothing to save for a virtual connector; log for debugging only. */
static void virtio_gpu_conn_save(struct drm_connector *connector)
{
	DRM_DEBUG("\n");
}
338
/* Nothing to restore for a virtual connector; log for debugging only. */
static void virtio_gpu_conn_restore(struct drm_connector *connector)
{
	DRM_DEBUG("\n");
}
343
344static enum drm_connector_status virtio_gpu_conn_detect(
345 struct drm_connector *connector,
346 bool force)
347{
348 struct virtio_gpu_output *output =
349 drm_connector_to_virtio_gpu_output(connector);
350
351 if (output->info.enabled)
352 return connector_status_connected;
353 else
354 return connector_status_disconnected;
355}
356
/*
 * Connector destructor: unregister from sysfs, release the core state,
 * then free the enclosing virtio_gpu_output the connector lives in.
 */
static void virtio_gpu_conn_destroy(struct drm_connector *connector)
{
	struct virtio_gpu_output *output =
		drm_connector_to_virtio_gpu_output(connector);

	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(output);
}
366
/*
 * Connector vtable: detection reads host-provided info, everything
 * else defers to the probe and atomic helpers.
 */
static const struct drm_connector_funcs virtio_gpu_connector_funcs = {
	.dpms = drm_atomic_helper_connector_dpms,
	.save = virtio_gpu_conn_save,
	.restore = virtio_gpu_conn_restore,
	.detect = virtio_gpu_conn_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = virtio_gpu_conn_destroy,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};
378
/* Encoder vtable: only cleanup is needed (encoder is embedded in output). */
static const struct drm_encoder_funcs virtio_gpu_enc_funcs = {
	.destroy = drm_encoder_cleanup,
};
382
/*
 * Create and wire up one scanout: plane -> CRTC -> encoder -> connector.
 *
 * Output 0 starts with default display info (enabled, XRES_DEF x
 * YRES_DEF) so a usable mode exists before the host responds to the
 * display-info query.  Returns 0 on success or the plane-init error.
 */
static int vgdev_output_init(struct virtio_gpu_device *vgdev, int index)
{
	struct drm_device *dev = vgdev->ddev;
	struct virtio_gpu_output *output = vgdev->outputs + index;
	struct drm_connector *connector = &output->conn;
	struct drm_encoder *encoder = &output->enc;
	struct drm_crtc *crtc = &output->crtc;
	struct drm_plane *plane;

	output->index = index;
	if (index == 0) {
		output->info.enabled = cpu_to_le32(true);
		output->info.r.width = cpu_to_le32(XRES_DEF);
		output->info.r.height = cpu_to_le32(YRES_DEF);
	}

	plane = virtio_gpu_plane_init(vgdev, index);
	if (IS_ERR(plane))
		return PTR_ERR(plane);
	drm_crtc_init_with_planes(dev, crtc, plane, NULL,
				  &virtio_gpu_crtc_funcs);
	/* gamma hook is a stub, but register a size so the ioctl works */
	drm_mode_crtc_set_gamma_size(crtc, 256);
	drm_crtc_helper_add(crtc, &virtio_gpu_crtc_helper_funcs);
	plane->crtc = crtc;

	drm_connector_init(dev, connector, &virtio_gpu_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &virtio_gpu_conn_helper_funcs);

	drm_encoder_init(dev, encoder, &virtio_gpu_enc_funcs,
			 DRM_MODE_ENCODER_VIRTUAL);
	drm_encoder_helper_add(encoder, &virtio_gpu_enc_helper_funcs);
	/* fixed 1:1 mapping between CRTCs and encoders */
	encoder->possible_crtcs = 1 << index;

	drm_mode_connector_attach_encoder(connector, encoder);
	drm_connector_register(connector);
	return 0;
}
421
422static struct drm_framebuffer *
423virtio_gpu_user_framebuffer_create(struct drm_device *dev,
424 struct drm_file *file_priv,
425 struct drm_mode_fb_cmd2 *mode_cmd)
426{
427 struct drm_gem_object *obj = NULL;
428 struct virtio_gpu_framebuffer *virtio_gpu_fb;
429 int ret;
430
431 /* lookup object associated with res handle */
432 obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
433 if (!obj)
434 return ERR_PTR(-EINVAL);
435
436 virtio_gpu_fb = kzalloc(sizeof(*virtio_gpu_fb), GFP_KERNEL);
437 if (virtio_gpu_fb == NULL)
438 return ERR_PTR(-ENOMEM);
439
440 ret = virtio_gpu_framebuffer_init(dev, virtio_gpu_fb, mode_cmd, obj);
441 if (ret) {
442 kfree(virtio_gpu_fb);
443 if (obj)
444 drm_gem_object_unreference_unlocked(obj);
445 return NULL;
446 }
447
448 return &virtio_gpu_fb->base;
449}
450
/* Mode-config vtable: custom fb_create, stock atomic check/commit. */
static const struct drm_mode_config_funcs virtio_gpu_mode_funcs = {
	.fb_create = virtio_gpu_user_framebuffer_create,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};
456
457int virtio_gpu_modeset_init(struct virtio_gpu_device *vgdev)
458{
459 int i;
460
461 drm_mode_config_init(vgdev->ddev);
462 vgdev->ddev->mode_config.funcs = (void *)&virtio_gpu_mode_funcs;
463
464 /* modes will be validated against the framebuffer size */
465 vgdev->ddev->mode_config.min_width = XRES_MIN;
466 vgdev->ddev->mode_config.min_height = YRES_MIN;
467 vgdev->ddev->mode_config.max_width = XRES_MAX;
468 vgdev->ddev->mode_config.max_height = YRES_MAX;
469
470 for (i = 0 ; i < vgdev->num_scanouts; ++i)
471 vgdev_output_init(vgdev, i);
472
473 drm_mode_config_reset(vgdev->ddev);
474 return 0;
475}
476
/*
 * Tear down KMS: fbdev emulation first (it holds framebuffers), then
 * release all mode objects registered on the DRM device.
 */
void virtio_gpu_modeset_fini(struct virtio_gpu_device *vgdev)
{
	virtio_gpu_fbdev_fini(vgdev);
	drm_mode_config_cleanup(vgdev->ddev);
}