]>
Commit | Line | Data |
---|---|---|
dc5698e8 DA |
1 | /* |
2 | * Copyright (C) 2015 Red Hat, Inc. | |
3 | * All Rights Reserved. | |
4 | * | |
5 | * Authors: | |
6 | * Dave Airlie | |
7 | * Alon Levy | |
8 | * | |
9 | * Permission is hereby granted, free of charge, to any person obtaining a | |
10 | * copy of this software and associated documentation files (the "Software"), | |
11 | * to deal in the Software without restriction, including without limitation | |
12 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
13 | * and/or sell copies of the Software, and to permit persons to whom the | |
14 | * Software is furnished to do so, subject to the following conditions: | |
15 | * | |
16 | * The above copyright notice and this permission notice shall be included in | |
17 | * all copies or substantial portions of the Software. | |
18 | * | |
19 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
20 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
21 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
22 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR | |
23 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | |
24 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | |
25 | * OTHER DEALINGS IN THE SOFTWARE. | |
26 | */ | |
27 | ||
28 | #include "virtgpu_drv.h" | |
29 | #include <drm/drm_crtc_helper.h> | |
30 | #include <drm/drm_atomic_helper.h> | |
31 | ||
/* Mode-list limits and defaults.  MIN/MAX bound the global mode_config;
 * DEF is the size advertised for output 0 before (or without) host
 * display info, and modes are further validated per-output against the
 * host-provided framebuffer size in virtio_gpu_conn_mode_valid(). */
#define XRES_MIN  32
#define YRES_MIN  32

#define XRES_DEF  1024
#define YRES_DEF  768

#define XRES_MAX  8192
#define YRES_MAX  8192
/*
 * Legacy (non-atomic) page flip: point the primary plane at @fb,
 * program the host scanout to the new resource, and complete @event
 * immediately (a virtual GPU has no real vblank to wait for).
 *
 * Always returns 0; the virtio commands are fire-and-forget here.
 */
static int virtio_gpu_page_flip(struct drm_crtc *crtc,
				struct drm_framebuffer *fb,
				struct drm_pending_vblank_event *event,
				uint32_t flags)
{
	struct virtio_gpu_device *vgdev = crtc->dev->dev_private;
	struct virtio_gpu_output *output =
		container_of(crtc, struct virtio_gpu_output, crtc);
	struct drm_plane *plane = crtc->primary;
	struct virtio_gpu_framebuffer *vgfb;
	struct virtio_gpu_object *bo;
	unsigned long irqflags;
	uint32_t handle;

	plane->fb = fb;
	vgfb = to_virtio_gpu_framebuffer(plane->fb);
	bo = gem_to_virtio_gpu_obj(vgfb->obj);
	handle = bo->hw_res_handle;

	DRM_DEBUG("handle 0x%x%s, crtc %dx%d\n", handle,
		  bo->dumb ? ", dumb" : "",
		  crtc->mode.hdisplay, crtc->mode.vdisplay);
	if (bo->dumb) {
		/* Dumb buffers are backed by guest memory: push the visible
		 * region to the host resource before scanning it out. */
		virtio_gpu_cmd_transfer_to_host_2d
			(vgdev, handle, 0,
			 cpu_to_le32(crtc->mode.hdisplay),
			 cpu_to_le32(crtc->mode.vdisplay),
			 0, 0, NULL);
	}
	/* Order matters: transfer (above), then set_scanout, then flush. */
	virtio_gpu_cmd_set_scanout(vgdev, output->index, handle,
				   crtc->mode.hdisplay,
				   crtc->mode.vdisplay, 0, 0);
	virtio_gpu_cmd_resource_flush(vgdev, handle, 0, 0,
				      crtc->mode.hdisplay,
				      crtc->mode.vdisplay);

	if (event) {
		/* Complete the flip event right away, under the event lock
		 * as required by drm_crtc_send_vblank_event(). */
		spin_lock_irqsave(&crtc->dev->event_lock, irqflags);
		drm_crtc_send_vblank_event(crtc, event);
		spin_unlock_irqrestore(&crtc->dev->event_lock, irqflags);
	}

	return 0;
}
85 | ||
/* CRTC entry points: state handling is delegated to the atomic helpers;
 * only page_flip has a driver-specific implementation. */
static const struct drm_crtc_funcs virtio_gpu_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = drm_crtc_cleanup,

	.page_flip = virtio_gpu_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};
95 | ||
96 | static void virtio_gpu_user_framebuffer_destroy(struct drm_framebuffer *fb) | |
97 | { | |
98 | struct virtio_gpu_framebuffer *virtio_gpu_fb | |
99 | = to_virtio_gpu_framebuffer(fb); | |
100 | ||
101 | if (virtio_gpu_fb->obj) | |
102 | drm_gem_object_unreference_unlocked(virtio_gpu_fb->obj); | |
103 | drm_framebuffer_cleanup(fb); | |
104 | kfree(virtio_gpu_fb); | |
105 | } | |
106 | ||
/*
 * Framebuffer .dirty hook: forward the clip rectangles to the driver's
 * dirty-tracking code.  @flags and @color are unused for this device.
 */
static int
virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb,
				     struct drm_file *file_priv,
				     unsigned flags, unsigned color,
				     struct drm_clip_rect *clips,
				     unsigned num_clips)
{
	struct virtio_gpu_framebuffer *vgfb = to_virtio_gpu_framebuffer(fb);

	return virtio_gpu_surface_dirty(vgfb, clips, num_clips);
}
119 | ||
/* Framebuffer vtable for userspace-created framebuffers. */
static const struct drm_framebuffer_funcs virtio_gpu_fb_funcs = {
	.destroy = virtio_gpu_user_framebuffer_destroy,
	.dirty = virtio_gpu_framebuffer_surface_dirty,
};
124 | ||
125 | int | |
126 | virtio_gpu_framebuffer_init(struct drm_device *dev, | |
127 | struct virtio_gpu_framebuffer *vgfb, | |
1eb83451 | 128 | const struct drm_mode_fb_cmd2 *mode_cmd, |
dc5698e8 DA |
129 | struct drm_gem_object *obj) |
130 | { | |
131 | int ret; | |
132 | struct virtio_gpu_object *bo; | |
133 | vgfb->obj = obj; | |
134 | ||
135 | bo = gem_to_virtio_gpu_obj(obj); | |
136 | ||
137 | ret = drm_framebuffer_init(dev, &vgfb->base, &virtio_gpu_fb_funcs); | |
138 | if (ret) { | |
139 | vgfb->obj = NULL; | |
140 | return ret; | |
141 | } | |
142 | drm_helper_mode_fill_fb_struct(&vgfb->base, mode_cmd); | |
143 | ||
144 | spin_lock_init(&vgfb->dirty_lock); | |
145 | vgfb->x1 = vgfb->y1 = INT_MAX; | |
146 | vgfb->x2 = vgfb->y2 = 0; | |
147 | return 0; | |
148 | } | |
149 | ||
dc5698e8 DA |
150 | static void virtio_gpu_crtc_mode_set_nofb(struct drm_crtc *crtc) |
151 | { | |
152 | struct drm_device *dev = crtc->dev; | |
153 | struct virtio_gpu_device *vgdev = dev->dev_private; | |
154 | struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc); | |
155 | ||
156 | virtio_gpu_cmd_set_scanout(vgdev, output->index, 0, | |
157 | crtc->mode.hdisplay, | |
158 | crtc->mode.vdisplay, 0, 0); | |
159 | } | |
160 | ||
/* Nothing to do on enable: the host scanout is (re)programmed by
 * mode_set_nofb and the plane update paths. */
static void virtio_gpu_crtc_enable(struct drm_crtc *crtc)
{
}
164 | ||
165 | static void virtio_gpu_crtc_disable(struct drm_crtc *crtc) | |
166 | { | |
167 | struct drm_device *dev = crtc->dev; | |
168 | struct virtio_gpu_device *vgdev = dev->dev_private; | |
169 | struct virtio_gpu_output *output = drm_crtc_to_virtio_gpu_output(crtc); | |
170 | ||
171 | virtio_gpu_cmd_set_scanout(vgdev, output->index, 0, 0, 0, 0, 0); | |
172 | } | |
173 | ||
/* No driver-specific CRTC state constraints; accept everything the
 * core helpers already validated. */
static int virtio_gpu_crtc_atomic_check(struct drm_crtc *crtc,
					struct drm_crtc_state *state)
{
	return 0;
}
179 | ||
/*
 * Complete any pending atomic-commit event at flush time.  There is no
 * real vblank on a virtual GPU, so the event is sent immediately, under
 * the event lock as drm_crtc_send_vblank_event() requires.
 */
static void virtio_gpu_crtc_atomic_flush(struct drm_crtc *crtc,
					 struct drm_crtc_state *old_state)
{
	unsigned long flags;

	spin_lock_irqsave(&crtc->dev->event_lock, flags);
	if (crtc->state->event)
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
}
190 | ||
/* CRTC helper hooks used by the atomic commit helpers. */
static const struct drm_crtc_helper_funcs virtio_gpu_crtc_helper_funcs = {
	.enable = virtio_gpu_crtc_enable,
	.disable = virtio_gpu_crtc_disable,
	.mode_set_nofb = virtio_gpu_crtc_mode_set_nofb,
	.atomic_check = virtio_gpu_crtc_atomic_check,
	.atomic_flush = virtio_gpu_crtc_atomic_flush,
};
198 | ||
/* The virtual encoder has no hardware to program; stub. */
static void virtio_gpu_enc_mode_set(struct drm_encoder *encoder,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
}
204 | ||
/* Nothing to enable on a virtual encoder; stub. */
static void virtio_gpu_enc_enable(struct drm_encoder *encoder)
{
}
208 | ||
/* Nothing to disable on a virtual encoder; stub. */
static void virtio_gpu_enc_disable(struct drm_encoder *encoder)
{
}
212 | ||
213 | static int virtio_gpu_conn_get_modes(struct drm_connector *connector) | |
214 | { | |
215 | struct virtio_gpu_output *output = | |
216 | drm_connector_to_virtio_gpu_output(connector); | |
217 | struct drm_display_mode *mode = NULL; | |
218 | int count, width, height; | |
219 | ||
220 | width = le32_to_cpu(output->info.r.width); | |
221 | height = le32_to_cpu(output->info.r.height); | |
222 | count = drm_add_modes_noedid(connector, XRES_MAX, YRES_MAX); | |
223 | ||
224 | if (width == 0 || height == 0) { | |
225 | width = XRES_DEF; | |
226 | height = YRES_DEF; | |
227 | drm_set_preferred_mode(connector, XRES_DEF, YRES_DEF); | |
228 | } else { | |
229 | DRM_DEBUG("add mode: %dx%d\n", width, height); | |
230 | mode = drm_cvt_mode(connector->dev, width, height, 60, | |
231 | false, false, false); | |
232 | mode->type |= DRM_MODE_TYPE_PREFERRED; | |
233 | drm_mode_probed_add(connector, mode); | |
234 | count++; | |
235 | } | |
236 | ||
237 | return count; | |
238 | } | |
239 | ||
240 | static int virtio_gpu_conn_mode_valid(struct drm_connector *connector, | |
241 | struct drm_display_mode *mode) | |
242 | { | |
243 | struct virtio_gpu_output *output = | |
244 | drm_connector_to_virtio_gpu_output(connector); | |
245 | int width, height; | |
246 | ||
247 | width = le32_to_cpu(output->info.r.width); | |
248 | height = le32_to_cpu(output->info.r.height); | |
249 | ||
250 | if (!(mode->type & DRM_MODE_TYPE_PREFERRED)) | |
251 | return MODE_OK; | |
252 | if (mode->hdisplay == XRES_DEF && mode->vdisplay == YRES_DEF) | |
253 | return MODE_OK; | |
254 | if (mode->hdisplay <= width && mode->hdisplay >= width - 16 && | |
255 | mode->vdisplay <= height && mode->vdisplay >= height - 16) | |
256 | return MODE_OK; | |
257 | ||
258 | DRM_DEBUG("del mode: %dx%d\n", mode->hdisplay, mode->vdisplay); | |
259 | return MODE_BAD; | |
260 | } | |
261 | ||
262 | static struct drm_encoder* | |
263 | virtio_gpu_best_encoder(struct drm_connector *connector) | |
264 | { | |
265 | struct virtio_gpu_output *virtio_gpu_output = | |
266 | drm_connector_to_virtio_gpu_output(connector); | |
267 | ||
268 | return &virtio_gpu_output->enc; | |
269 | } | |
270 | ||
/* Encoder helper hooks — all stubs for the virtual encoder. */
static const struct drm_encoder_helper_funcs virtio_gpu_enc_helper_funcs = {
	.mode_set = virtio_gpu_enc_mode_set,
	.enable = virtio_gpu_enc_enable,
	.disable = virtio_gpu_enc_disable,
};
276 | ||
/* Connector helper hooks for probing and encoder selection. */
static const struct drm_connector_helper_funcs virtio_gpu_conn_helper_funcs = {
	.get_modes = virtio_gpu_conn_get_modes,
	.mode_valid = virtio_gpu_conn_mode_valid,
	.best_encoder = virtio_gpu_best_encoder,
};
282 | ||
dc5698e8 DA |
283 | static enum drm_connector_status virtio_gpu_conn_detect( |
284 | struct drm_connector *connector, | |
285 | bool force) | |
286 | { | |
287 | struct virtio_gpu_output *output = | |
288 | drm_connector_to_virtio_gpu_output(connector); | |
289 | ||
290 | if (output->info.enabled) | |
291 | return connector_status_connected; | |
292 | else | |
293 | return connector_status_disconnected; | |
294 | } | |
295 | ||
/* Connector .destroy hook: unregister, clean up, and free the output. */
static void virtio_gpu_conn_destroy(struct drm_connector *connector)
{
	struct virtio_gpu_output *output =
		drm_connector_to_virtio_gpu_output(connector);

	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(output);
}
305 | ||
/* Connector entry points; state handling goes to the atomic helpers. */
static const struct drm_connector_funcs virtio_gpu_connector_funcs = {
	.dpms = drm_atomic_helper_connector_dpms,
	.detect = virtio_gpu_conn_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = virtio_gpu_conn_destroy,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};
315 | ||
/* Encoder vtable: only cleanup is needed for a virtual encoder. */
static const struct drm_encoder_funcs virtio_gpu_enc_funcs = {
	.destroy = drm_encoder_cleanup,
};
319 | ||
/*
 * Create the full display pipeline for scanout @index: primary and
 * cursor planes, CRTC, encoder, and connector, all embedded in
 * vgdev->outputs[index].
 *
 * Output 0 is pre-marked enabled at XRES_DEF x YRES_DEF so a usable
 * head exists before the host's display-info response arrives.
 *
 * NOTE(review): if cursor plane creation fails, the primary plane
 * created just above is not torn down before returning — confirm
 * whether that leak matters on this error path.  Return values of the
 * drm_*_init() calls below are also ignored.
 */
static int vgdev_output_init(struct virtio_gpu_device *vgdev, int index)
{
	struct drm_device *dev = vgdev->ddev;
	struct virtio_gpu_output *output = vgdev->outputs + index;
	struct drm_connector *connector = &output->conn;
	struct drm_encoder *encoder = &output->enc;
	struct drm_crtc *crtc = &output->crtc;
	struct drm_plane *primary, *cursor;

	output->index = index;
	if (index == 0) {
		output->info.enabled = cpu_to_le32(true);
		output->info.r.width = cpu_to_le32(XRES_DEF);
		output->info.r.height = cpu_to_le32(YRES_DEF);
	}

	primary = virtio_gpu_plane_init(vgdev, DRM_PLANE_TYPE_PRIMARY, index);
	if (IS_ERR(primary))
		return PTR_ERR(primary);
	cursor = virtio_gpu_plane_init(vgdev, DRM_PLANE_TYPE_CURSOR, index);
	if (IS_ERR(cursor))
		return PTR_ERR(cursor);
	drm_crtc_init_with_planes(dev, crtc, primary, cursor,
				  &virtio_gpu_crtc_funcs, NULL);
	drm_crtc_helper_add(crtc, &virtio_gpu_crtc_helper_funcs);
	primary->crtc = crtc;
	cursor->crtc = crtc;

	drm_connector_init(dev, connector, &virtio_gpu_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &virtio_gpu_conn_helper_funcs);

	drm_encoder_init(dev, encoder, &virtio_gpu_enc_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &virtio_gpu_enc_helper_funcs);
	/* 1:1 crtc/encoder mapping */
	encoder->possible_crtcs = 1 << index;

	drm_mode_connector_attach_encoder(connector, encoder);
	drm_connector_register(connector);
	return 0;
}
361 | ||
362 | static struct drm_framebuffer * | |
363 | virtio_gpu_user_framebuffer_create(struct drm_device *dev, | |
364 | struct drm_file *file_priv, | |
1eb83451 | 365 | const struct drm_mode_fb_cmd2 *mode_cmd) |
dc5698e8 DA |
366 | { |
367 | struct drm_gem_object *obj = NULL; | |
368 | struct virtio_gpu_framebuffer *virtio_gpu_fb; | |
369 | int ret; | |
370 | ||
371 | /* lookup object associated with res handle */ | |
a8ad0bd8 | 372 | obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]); |
dc5698e8 DA |
373 | if (!obj) |
374 | return ERR_PTR(-EINVAL); | |
375 | ||
376 | virtio_gpu_fb = kzalloc(sizeof(*virtio_gpu_fb), GFP_KERNEL); | |
377 | if (virtio_gpu_fb == NULL) | |
378 | return ERR_PTR(-ENOMEM); | |
379 | ||
380 | ret = virtio_gpu_framebuffer_init(dev, virtio_gpu_fb, mode_cmd, obj); | |
381 | if (ret) { | |
382 | kfree(virtio_gpu_fb); | |
383 | if (obj) | |
384 | drm_gem_object_unreference_unlocked(obj); | |
385 | return NULL; | |
386 | } | |
387 | ||
388 | return &virtio_gpu_fb->base; | |
389 | } | |
390 | ||
/*
 * Synchronous atomic commit.  Nonblocking commits are rejected with
 * -EBUSY.  The helper-call sequence below (swap state, wait for
 * fences, disables, enables, planes, vblanks, cleanup, free) follows
 * the standard drm_atomic_helper commit ordering and must not be
 * reordered.
 */
static int vgdev_atomic_commit(struct drm_device *dev,
			       struct drm_atomic_state *state,
			       bool nonblock)
{
	if (nonblock)
		return -EBUSY;

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_helper_wait_for_fences(dev, state);

	drm_atomic_helper_commit_modeset_disables(dev, state);
	drm_atomic_helper_commit_modeset_enables(dev, state);
	drm_atomic_helper_commit_planes(dev, state, true);

	drm_atomic_helper_wait_for_vblanks(dev, state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_state_free(state);
	return 0;
}
410 | ||
/* Mode-config vtable: atomic check is fully generic; commit is the
 * synchronous driver implementation above. */
static const struct drm_mode_config_funcs virtio_gpu_mode_funcs = {
	.fb_create = virtio_gpu_user_framebuffer_create,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = vgdev_atomic_commit,
};
416 | ||
/*
 * Set up KMS: global mode-config limits, one output per host scanout,
 * then reset all atomic state.  Always returns 0.
 *
 * NOTE(review): vgdev_output_init() errors are ignored here — confirm
 * whether a failed output should abort initialization.
 */
int virtio_gpu_modeset_init(struct virtio_gpu_device *vgdev)
{
	int i;

	drm_mode_config_init(vgdev->ddev);
	/* cast drops const — presumably because mode_config.funcs is not
	 * const-qualified in this kernel version; TODO confirm */
	vgdev->ddev->mode_config.funcs = (void *)&virtio_gpu_mode_funcs;

	/* modes will be validated against the framebuffer size */
	vgdev->ddev->mode_config.min_width = XRES_MIN;
	vgdev->ddev->mode_config.min_height = YRES_MIN;
	vgdev->ddev->mode_config.max_width = XRES_MAX;
	vgdev->ddev->mode_config.max_height = YRES_MAX;

	for (i = 0 ; i < vgdev->num_scanouts; ++i)
		vgdev_output_init(vgdev, i);

	drm_mode_config_reset(vgdev->ddev);
	return 0;
}
436 | ||
/* Tear down KMS: fbdev emulation first, then the mode-config core
 * state (which destroys the remaining crtcs/connectors/planes). */
void virtio_gpu_modeset_fini(struct virtio_gpu_device *vgdev)
{
	virtio_gpu_fbdev_fini(vgdev);
	drm_mode_config_cleanup(vgdev->ddev);
}