/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
#include <drm/drmP.h>
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#include "ivsrcid/ivsrcid_vislands30.h"

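/* software vblank period in nanoseconds (~60 Hz), used by the hrtimer-based vblank emulation below */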
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666


static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state);

static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
	return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
	return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * The virtual display has no real display controller, so there are no
 * watermarks or line buffers to program; this is a no-op.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
};

static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
							   amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}


static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw version for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}


static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
};

static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}

static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;
	int i;

	drm_connector_for_each_possible_encoder(connector, encoder, i) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder, i)
		return encoder;

	return NULL;
}

static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[17] = {
		{ 640, 480},
		{ 720, 480},
		{ 800, 600},
		{ 848, 480},
		{1024, 768},
		{1152, 768},
		{1280, 720},
		{1280, 800},
		{1280, 854},
		{1280, 960},
		{1280, 1024},
		{1440, 900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200}
	};

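	/* expose a fixed list of common resolutions, generated as 60 Hz CVT modes */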
	for (i = 0; i < 17; i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
		drm_mode_probed_add(connector, mode);
	}

	return 0;
}

static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
	return;
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};

static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
	if (r)
		return r;

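	/* no hardware vblank counter on the virtual display; dce_virtual_vblank_get_counter() always returns 0 */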
	adev->ddev->max_vblank_count = 0;

	adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	adev->ddev->mode_config.preferred_depth = 24;
	adev->ddev->mode_config.prefer_shadow = 1;

	adev->ddev->mode_config.fb_base = adev->gmc.aper_base;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	/* allocate crtcs, encoders, connectors */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev->ddev);

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	/* clear the crtcs pointers so the dce irq finish routine cannot access freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	case CHIP_VEGA10:
	case CHIP_VEGA12:
	case CHIP_VEGA20:
		break;
	default:
		DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type);
	}
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			dce_virtual_set_crtc_vblank_interrupt_state(adev, i, AMDGPU_IRQ_STATE_DISABLE);

	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	/* add a new encoder */
	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	/* add a new connector */
	drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	drm_connector_register(connector);

	/* link them */
	drm_connector_attach_encoder(connector, encoder);

	return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}

static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	/* an IRQ can fire while we are still in the initial setup stage */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev->ddev->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
				 "AMDGPU_FLIP_SUBMITTED(%d)\n",
				 amdgpu_crtc->pflip_status,
				 AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
		return 0;
	}

	/* page flip completed. clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	amdgpu_bo_unref(&works->old_abo);
	kfree(works->shared);
	kfree(works);

	return 0;
}

static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

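	/* the timer was re-armed above with hrtimer_start(), so tell the hrtimer core not to restart it again */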
46ac3622
ED
701 return HRTIMER_NORESTART;
702}
703
e13273d4 704static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
82b9f817
AD
705 int crtc,
706 enum amdgpu_interrupt_state state)
e13273d4 707{
129d65c1 708 if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
e13273d4
ED
709 DRM_DEBUG("invalid crtc %d\n", crtc);
710 return;
711 }
46ac3622 712
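	/* vblank interrupts are emulated with a per-crtc hrtimer that fires every DCE_VIRTUAL_VBLANK_PERIOD */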
	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}


static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};