/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include "drmP.h"
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
/* Software vblank period for the virtual display: ~60 Hz, in nanoseconds. */
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666

/* Forward declarations for the setup helpers defined later in this file. */
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
49 * dce_virtual_vblank_wait - vblank wait asic callback.
51 * @adev: amdgpu_device pointer
52 * @crtc: crtc to wait for vblank on
54 * Wait for vblank on the requested crtc (evergreen+).
56 static void dce_virtual_vblank_wait(struct amdgpu_device
*adev
, int crtc
)
61 static u32
dce_virtual_vblank_get_counter(struct amdgpu_device
*adev
, int crtc
)
66 static void dce_virtual_page_flip(struct amdgpu_device
*adev
,
67 int crtc_id
, u64 crtc_base
, bool async
)
72 static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device
*adev
, int crtc
,
73 u32
*vbl
, u32
*position
)
81 static bool dce_virtual_hpd_sense(struct amdgpu_device
*adev
,
82 enum amdgpu_hpd_id hpd
)
87 static void dce_virtual_hpd_set_polarity(struct amdgpu_device
*adev
,
88 enum amdgpu_hpd_id hpd
)
93 static u32
dce_virtual_hpd_get_gpio_reg(struct amdgpu_device
*adev
)
98 static void dce_virtual_stop_mc_access(struct amdgpu_device
*adev
,
99 struct amdgpu_mode_mc_save
*save
)
101 switch (adev
->asic_type
) {
102 #ifdef CONFIG_DRM_AMDGPU_SI
107 dce_v6_0_disable_dce(adev
);
110 #ifdef CONFIG_DRM_AMDGPU_CIK
116 dce_v8_0_disable_dce(adev
);
121 dce_v10_0_disable_dce(adev
);
128 dce_v11_0_disable_dce(adev
);
131 #ifdef CONFIG_DRM_AMDGPU_SI
137 DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev
->asic_type
);
142 static void dce_virtual_resume_mc_access(struct amdgpu_device
*adev
,
143 struct amdgpu_mode_mc_save
*save
)
148 static void dce_virtual_set_vga_render_state(struct amdgpu_device
*adev
,
155 * dce_virtual_bandwidth_update - program display watermarks
157 * @adev: amdgpu_device pointer
159 * Calculate and program the display watermarks and line
160 * buffer allocation (CIK).
162 static void dce_virtual_bandwidth_update(struct amdgpu_device
*adev
)
167 static int dce_virtual_crtc_gamma_set(struct drm_crtc
*crtc
, u16
*red
,
168 u16
*green
, u16
*blue
, uint32_t size
,
169 struct drm_modeset_acquire_ctx
*ctx
)
171 struct amdgpu_crtc
*amdgpu_crtc
= to_amdgpu_crtc(crtc
);
174 /* userspace palettes are always correct as is */
175 for (i
= 0; i
< size
; i
++) {
176 amdgpu_crtc
->lut_r
[i
] = red
[i
] >> 6;
177 amdgpu_crtc
->lut_g
[i
] = green
[i
] >> 6;
178 amdgpu_crtc
->lut_b
[i
] = blue
[i
] >> 6;
184 static void dce_virtual_crtc_destroy(struct drm_crtc
*crtc
)
186 struct amdgpu_crtc
*amdgpu_crtc
= to_amdgpu_crtc(crtc
);
188 drm_crtc_cleanup(crtc
);
192 static const struct drm_crtc_funcs dce_virtual_crtc_funcs
= {
195 .gamma_set
= dce_virtual_crtc_gamma_set
,
196 .set_config
= amdgpu_crtc_set_config
,
197 .destroy
= dce_virtual_crtc_destroy
,
198 .page_flip_target
= amdgpu_crtc_page_flip_target
,
201 static void dce_virtual_crtc_dpms(struct drm_crtc
*crtc
, int mode
)
203 struct drm_device
*dev
= crtc
->dev
;
204 struct amdgpu_device
*adev
= dev
->dev_private
;
205 struct amdgpu_crtc
*amdgpu_crtc
= to_amdgpu_crtc(crtc
);
208 if (amdgpu_sriov_vf(adev
))
212 case DRM_MODE_DPMS_ON
:
213 amdgpu_crtc
->enabled
= true;
214 /* Make sure VBLANK interrupts are still enabled */
215 type
= amdgpu_crtc_idx_to_irq_type(adev
, amdgpu_crtc
->crtc_id
);
216 amdgpu_irq_update(adev
, &adev
->crtc_irq
, type
);
217 drm_crtc_vblank_on(crtc
);
219 case DRM_MODE_DPMS_STANDBY
:
220 case DRM_MODE_DPMS_SUSPEND
:
221 case DRM_MODE_DPMS_OFF
:
222 drm_crtc_vblank_off(crtc
);
223 amdgpu_crtc
->enabled
= false;
229 static void dce_virtual_crtc_prepare(struct drm_crtc
*crtc
)
231 dce_virtual_crtc_dpms(crtc
, DRM_MODE_DPMS_OFF
);
234 static void dce_virtual_crtc_commit(struct drm_crtc
*crtc
)
236 dce_virtual_crtc_dpms(crtc
, DRM_MODE_DPMS_ON
);
239 static void dce_virtual_crtc_disable(struct drm_crtc
*crtc
)
241 struct amdgpu_crtc
*amdgpu_crtc
= to_amdgpu_crtc(crtc
);
243 dce_virtual_crtc_dpms(crtc
, DRM_MODE_DPMS_OFF
);
244 if (crtc
->primary
->fb
) {
246 struct amdgpu_framebuffer
*amdgpu_fb
;
247 struct amdgpu_bo
*abo
;
249 amdgpu_fb
= to_amdgpu_framebuffer(crtc
->primary
->fb
);
250 abo
= gem_to_amdgpu_bo(amdgpu_fb
->obj
);
251 r
= amdgpu_bo_reserve(abo
, true);
253 DRM_ERROR("failed to reserve abo before unpin\n");
255 amdgpu_bo_unpin(abo
);
256 amdgpu_bo_unreserve(abo
);
260 amdgpu_crtc
->pll_id
= ATOM_PPLL_INVALID
;
261 amdgpu_crtc
->encoder
= NULL
;
262 amdgpu_crtc
->connector
= NULL
;
265 static int dce_virtual_crtc_mode_set(struct drm_crtc
*crtc
,
266 struct drm_display_mode
*mode
,
267 struct drm_display_mode
*adjusted_mode
,
268 int x
, int y
, struct drm_framebuffer
*old_fb
)
270 struct amdgpu_crtc
*amdgpu_crtc
= to_amdgpu_crtc(crtc
);
272 /* update the hw version fpr dpm */
273 amdgpu_crtc
->hw_mode
= *adjusted_mode
;
278 static bool dce_virtual_crtc_mode_fixup(struct drm_crtc
*crtc
,
279 const struct drm_display_mode
*mode
,
280 struct drm_display_mode
*adjusted_mode
)
286 static int dce_virtual_crtc_set_base(struct drm_crtc
*crtc
, int x
, int y
,
287 struct drm_framebuffer
*old_fb
)
292 static void dce_virtual_crtc_load_lut(struct drm_crtc
*crtc
)
297 static int dce_virtual_crtc_set_base_atomic(struct drm_crtc
*crtc
,
298 struct drm_framebuffer
*fb
,
299 int x
, int y
, enum mode_set_atomic state
)
304 static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs
= {
305 .dpms
= dce_virtual_crtc_dpms
,
306 .mode_fixup
= dce_virtual_crtc_mode_fixup
,
307 .mode_set
= dce_virtual_crtc_mode_set
,
308 .mode_set_base
= dce_virtual_crtc_set_base
,
309 .mode_set_base_atomic
= dce_virtual_crtc_set_base_atomic
,
310 .prepare
= dce_virtual_crtc_prepare
,
311 .commit
= dce_virtual_crtc_commit
,
312 .load_lut
= dce_virtual_crtc_load_lut
,
313 .disable
= dce_virtual_crtc_disable
,
316 static int dce_virtual_crtc_init(struct amdgpu_device
*adev
, int index
)
318 struct amdgpu_crtc
*amdgpu_crtc
;
321 amdgpu_crtc
= kzalloc(sizeof(struct amdgpu_crtc
) +
322 (AMDGPUFB_CONN_LIMIT
* sizeof(struct drm_connector
*)), GFP_KERNEL
);
323 if (amdgpu_crtc
== NULL
)
326 drm_crtc_init(adev
->ddev
, &amdgpu_crtc
->base
, &dce_virtual_crtc_funcs
);
328 drm_mode_crtc_set_gamma_size(&amdgpu_crtc
->base
, 256);
329 amdgpu_crtc
->crtc_id
= index
;
330 adev
->mode_info
.crtcs
[index
] = amdgpu_crtc
;
332 for (i
= 0; i
< 256; i
++) {
333 amdgpu_crtc
->lut_r
[i
] = i
<< 2;
334 amdgpu_crtc
->lut_g
[i
] = i
<< 2;
335 amdgpu_crtc
->lut_b
[i
] = i
<< 2;
338 amdgpu_crtc
->pll_id
= ATOM_PPLL_INVALID
;
339 amdgpu_crtc
->encoder
= NULL
;
340 amdgpu_crtc
->connector
= NULL
;
341 amdgpu_crtc
->vsync_timer_enabled
= AMDGPU_IRQ_STATE_DISABLE
;
342 drm_crtc_helper_add(&amdgpu_crtc
->base
, &dce_virtual_crtc_helper_funcs
);
347 static int dce_virtual_early_init(void *handle
)
349 struct amdgpu_device
*adev
= (struct amdgpu_device
*)handle
;
351 dce_virtual_set_display_funcs(adev
);
352 dce_virtual_set_irq_funcs(adev
);
354 adev
->mode_info
.num_hpd
= 1;
355 adev
->mode_info
.num_dig
= 1;
359 static struct drm_encoder
*
360 dce_virtual_encoder(struct drm_connector
*connector
)
362 int enc_id
= connector
->encoder_ids
[0];
363 struct drm_encoder
*encoder
;
366 for (i
= 0; i
< DRM_CONNECTOR_MAX_ENCODER
; i
++) {
367 if (connector
->encoder_ids
[i
] == 0)
370 encoder
= drm_encoder_find(connector
->dev
, connector
->encoder_ids
[i
]);
374 if (encoder
->encoder_type
== DRM_MODE_ENCODER_VIRTUAL
)
378 /* pick the first one */
380 return drm_encoder_find(connector
->dev
, enc_id
);
384 static int dce_virtual_get_modes(struct drm_connector
*connector
)
386 struct drm_device
*dev
= connector
->dev
;
387 struct drm_display_mode
*mode
= NULL
;
389 static const struct mode_size
{
392 } common_modes
[17] = {
412 for (i
= 0; i
< 17; i
++) {
413 mode
= drm_cvt_mode(dev
, common_modes
[i
].w
, common_modes
[i
].h
, 60, false, false, false);
414 drm_mode_probed_add(connector
, mode
);
420 static int dce_virtual_mode_valid(struct drm_connector
*connector
,
421 struct drm_display_mode
*mode
)
427 dce_virtual_dpms(struct drm_connector
*connector
, int mode
)
433 dce_virtual_set_property(struct drm_connector
*connector
,
434 struct drm_property
*property
,
440 static void dce_virtual_destroy(struct drm_connector
*connector
)
442 drm_connector_unregister(connector
);
443 drm_connector_cleanup(connector
);
447 static void dce_virtual_force(struct drm_connector
*connector
)
452 static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs
= {
453 .get_modes
= dce_virtual_get_modes
,
454 .mode_valid
= dce_virtual_mode_valid
,
455 .best_encoder
= dce_virtual_encoder
,
458 static const struct drm_connector_funcs dce_virtual_connector_funcs
= {
459 .dpms
= dce_virtual_dpms
,
460 .fill_modes
= drm_helper_probe_single_connector_modes
,
461 .set_property
= dce_virtual_set_property
,
462 .destroy
= dce_virtual_destroy
,
463 .force
= dce_virtual_force
,
466 static int dce_virtual_sw_init(void *handle
)
469 struct amdgpu_device
*adev
= (struct amdgpu_device
*)handle
;
471 r
= amdgpu_irq_add_id(adev
, AMDGPU_IH_CLIENTID_LEGACY
, 229, &adev
->crtc_irq
);
475 adev
->ddev
->max_vblank_count
= 0;
477 adev
->ddev
->mode_config
.funcs
= &amdgpu_mode_funcs
;
479 adev
->ddev
->mode_config
.max_width
= 16384;
480 adev
->ddev
->mode_config
.max_height
= 16384;
482 adev
->ddev
->mode_config
.preferred_depth
= 24;
483 adev
->ddev
->mode_config
.prefer_shadow
= 1;
485 adev
->ddev
->mode_config
.fb_base
= adev
->mc
.aper_base
;
487 r
= amdgpu_modeset_create_props(adev
);
491 adev
->ddev
->mode_config
.max_width
= 16384;
492 adev
->ddev
->mode_config
.max_height
= 16384;
494 /* allocate crtcs, encoders, connectors */
495 for (i
= 0; i
< adev
->mode_info
.num_crtc
; i
++) {
496 r
= dce_virtual_crtc_init(adev
, i
);
499 r
= dce_virtual_connector_encoder_init(adev
, i
);
504 drm_kms_helper_poll_init(adev
->ddev
);
506 adev
->mode_info
.mode_config_initialized
= true;
510 static int dce_virtual_sw_fini(void *handle
)
512 struct amdgpu_device
*adev
= (struct amdgpu_device
*)handle
;
514 kfree(adev
->mode_info
.bios_hardcoded_edid
);
516 drm_kms_helper_poll_fini(adev
->ddev
);
518 drm_mode_config_cleanup(adev
->ddev
);
519 adev
->mode_info
.mode_config_initialized
= false;
523 static int dce_virtual_hw_init(void *handle
)
528 static int dce_virtual_hw_fini(void *handle
)
533 static int dce_virtual_suspend(void *handle
)
535 return dce_virtual_hw_fini(handle
);
538 static int dce_virtual_resume(void *handle
)
540 return dce_virtual_hw_init(handle
);
543 static bool dce_virtual_is_idle(void *handle
)
548 static int dce_virtual_wait_for_idle(void *handle
)
553 static int dce_virtual_soft_reset(void *handle
)
558 static int dce_virtual_set_clockgating_state(void *handle
,
559 enum amd_clockgating_state state
)
564 static int dce_virtual_set_powergating_state(void *handle
,
565 enum amd_powergating_state state
)
570 static const struct amd_ip_funcs dce_virtual_ip_funcs
= {
571 .name
= "dce_virtual",
572 .early_init
= dce_virtual_early_init
,
574 .sw_init
= dce_virtual_sw_init
,
575 .sw_fini
= dce_virtual_sw_fini
,
576 .hw_init
= dce_virtual_hw_init
,
577 .hw_fini
= dce_virtual_hw_fini
,
578 .suspend
= dce_virtual_suspend
,
579 .resume
= dce_virtual_resume
,
580 .is_idle
= dce_virtual_is_idle
,
581 .wait_for_idle
= dce_virtual_wait_for_idle
,
582 .soft_reset
= dce_virtual_soft_reset
,
583 .set_clockgating_state
= dce_virtual_set_clockgating_state
,
584 .set_powergating_state
= dce_virtual_set_powergating_state
,
587 /* these are handled by the primary encoders */
588 static void dce_virtual_encoder_prepare(struct drm_encoder
*encoder
)
593 static void dce_virtual_encoder_commit(struct drm_encoder
*encoder
)
599 dce_virtual_encoder_mode_set(struct drm_encoder
*encoder
,
600 struct drm_display_mode
*mode
,
601 struct drm_display_mode
*adjusted_mode
)
606 static void dce_virtual_encoder_disable(struct drm_encoder
*encoder
)
612 dce_virtual_encoder_dpms(struct drm_encoder
*encoder
, int mode
)
617 static bool dce_virtual_encoder_mode_fixup(struct drm_encoder
*encoder
,
618 const struct drm_display_mode
*mode
,
619 struct drm_display_mode
*adjusted_mode
)
624 static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs
= {
625 .dpms
= dce_virtual_encoder_dpms
,
626 .mode_fixup
= dce_virtual_encoder_mode_fixup
,
627 .prepare
= dce_virtual_encoder_prepare
,
628 .mode_set
= dce_virtual_encoder_mode_set
,
629 .commit
= dce_virtual_encoder_commit
,
630 .disable
= dce_virtual_encoder_disable
,
633 static void dce_virtual_encoder_destroy(struct drm_encoder
*encoder
)
635 drm_encoder_cleanup(encoder
);
639 static const struct drm_encoder_funcs dce_virtual_encoder_funcs
= {
640 .destroy
= dce_virtual_encoder_destroy
,
643 static int dce_virtual_connector_encoder_init(struct amdgpu_device
*adev
,
646 struct drm_encoder
*encoder
;
647 struct drm_connector
*connector
;
649 /* add a new encoder */
650 encoder
= kzalloc(sizeof(struct drm_encoder
), GFP_KERNEL
);
653 encoder
->possible_crtcs
= 1 << index
;
654 drm_encoder_init(adev
->ddev
, encoder
, &dce_virtual_encoder_funcs
,
655 DRM_MODE_ENCODER_VIRTUAL
, NULL
);
656 drm_encoder_helper_add(encoder
, &dce_virtual_encoder_helper_funcs
);
658 connector
= kzalloc(sizeof(struct drm_connector
), GFP_KERNEL
);
664 /* add a new connector */
665 drm_connector_init(adev
->ddev
, connector
, &dce_virtual_connector_funcs
,
666 DRM_MODE_CONNECTOR_VIRTUAL
);
667 drm_connector_helper_add(connector
, &dce_virtual_connector_helper_funcs
);
668 connector
->display_info
.subpixel_order
= SubPixelHorizontalRGB
;
669 connector
->interlace_allowed
= false;
670 connector
->doublescan_allowed
= false;
671 drm_connector_register(connector
);
674 drm_mode_connector_attach_encoder(connector
, encoder
);
679 static const struct amdgpu_display_funcs dce_virtual_display_funcs
= {
680 .set_vga_render_state
= &dce_virtual_set_vga_render_state
,
681 .bandwidth_update
= &dce_virtual_bandwidth_update
,
682 .vblank_get_counter
= &dce_virtual_vblank_get_counter
,
683 .vblank_wait
= &dce_virtual_vblank_wait
,
684 .backlight_set_level
= NULL
,
685 .backlight_get_level
= NULL
,
686 .hpd_sense
= &dce_virtual_hpd_sense
,
687 .hpd_set_polarity
= &dce_virtual_hpd_set_polarity
,
688 .hpd_get_gpio_reg
= &dce_virtual_hpd_get_gpio_reg
,
689 .page_flip
= &dce_virtual_page_flip
,
690 .page_flip_get_scanoutpos
= &dce_virtual_crtc_get_scanoutpos
,
692 .add_connector
= NULL
,
693 .stop_mc_access
= &dce_virtual_stop_mc_access
,
694 .resume_mc_access
= &dce_virtual_resume_mc_access
,
697 static void dce_virtual_set_display_funcs(struct amdgpu_device
*adev
)
699 if (adev
->mode_info
.funcs
== NULL
)
700 adev
->mode_info
.funcs
= &dce_virtual_display_funcs
;
703 static int dce_virtual_pageflip(struct amdgpu_device
*adev
,
707 struct amdgpu_crtc
*amdgpu_crtc
;
708 struct amdgpu_flip_work
*works
;
710 amdgpu_crtc
= adev
->mode_info
.crtcs
[crtc_id
];
712 if (crtc_id
>= adev
->mode_info
.num_crtc
) {
713 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id
);
717 /* IRQ could occur when in initial stage */
718 if (amdgpu_crtc
== NULL
)
721 spin_lock_irqsave(&adev
->ddev
->event_lock
, flags
);
722 works
= amdgpu_crtc
->pflip_works
;
723 if (amdgpu_crtc
->pflip_status
!= AMDGPU_FLIP_SUBMITTED
) {
724 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
725 "AMDGPU_FLIP_SUBMITTED(%d)\n",
726 amdgpu_crtc
->pflip_status
,
727 AMDGPU_FLIP_SUBMITTED
);
728 spin_unlock_irqrestore(&adev
->ddev
->event_lock
, flags
);
732 /* page flip completed. clean up */
733 amdgpu_crtc
->pflip_status
= AMDGPU_FLIP_NONE
;
734 amdgpu_crtc
->pflip_works
= NULL
;
736 /* wakeup usersapce */
738 drm_crtc_send_vblank_event(&amdgpu_crtc
->base
, works
->event
);
740 spin_unlock_irqrestore(&adev
->ddev
->event_lock
, flags
);
742 drm_crtc_vblank_put(&amdgpu_crtc
->base
);
743 schedule_work(&works
->unpin_work
);
748 static enum hrtimer_restart
dce_virtual_vblank_timer_handle(struct hrtimer
*vblank_timer
)
750 struct amdgpu_crtc
*amdgpu_crtc
= container_of(vblank_timer
,
751 struct amdgpu_crtc
, vblank_timer
);
752 struct drm_device
*ddev
= amdgpu_crtc
->base
.dev
;
753 struct amdgpu_device
*adev
= ddev
->dev_private
;
755 drm_handle_vblank(ddev
, amdgpu_crtc
->crtc_id
);
756 dce_virtual_pageflip(adev
, amdgpu_crtc
->crtc_id
);
757 hrtimer_start(vblank_timer
, DCE_VIRTUAL_VBLANK_PERIOD
,
760 return HRTIMER_NORESTART
;
763 static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device
*adev
,
765 enum amdgpu_interrupt_state state
)
767 if (crtc
>= adev
->mode_info
.num_crtc
) {
768 DRM_DEBUG("invalid crtc %d\n", crtc
);
772 if (state
&& !adev
->mode_info
.crtcs
[crtc
]->vsync_timer_enabled
) {
773 DRM_DEBUG("Enable software vsync timer\n");
774 hrtimer_init(&adev
->mode_info
.crtcs
[crtc
]->vblank_timer
,
775 CLOCK_MONOTONIC
, HRTIMER_MODE_REL
);
776 hrtimer_set_expires(&adev
->mode_info
.crtcs
[crtc
]->vblank_timer
,
777 DCE_VIRTUAL_VBLANK_PERIOD
);
778 adev
->mode_info
.crtcs
[crtc
]->vblank_timer
.function
=
779 dce_virtual_vblank_timer_handle
;
780 hrtimer_start(&adev
->mode_info
.crtcs
[crtc
]->vblank_timer
,
781 DCE_VIRTUAL_VBLANK_PERIOD
, HRTIMER_MODE_REL
);
782 } else if (!state
&& adev
->mode_info
.crtcs
[crtc
]->vsync_timer_enabled
) {
783 DRM_DEBUG("Disable software vsync timer\n");
784 hrtimer_cancel(&adev
->mode_info
.crtcs
[crtc
]->vblank_timer
);
787 adev
->mode_info
.crtcs
[crtc
]->vsync_timer_enabled
= state
;
788 DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc
, state
);
792 static int dce_virtual_set_crtc_irq_state(struct amdgpu_device
*adev
,
793 struct amdgpu_irq_src
*source
,
795 enum amdgpu_interrupt_state state
)
797 if (type
> AMDGPU_CRTC_IRQ_VBLANK6
)
800 dce_virtual_set_crtc_vblank_interrupt_state(adev
, type
, state
);
805 static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs
= {
806 .set
= dce_virtual_set_crtc_irq_state
,
810 static void dce_virtual_set_irq_funcs(struct amdgpu_device
*adev
)
812 adev
->crtc_irq
.num_types
= AMDGPU_CRTC_IRQ_LAST
;
813 adev
->crtc_irq
.funcs
= &dce_virtual_crtc_irq_funcs
;
816 const struct amdgpu_ip_block_version dce_virtual_ip_block
=
818 .type
= AMD_IP_BLOCK_TYPE_DCE
,
822 .funcs
= &dce_virtual_ip_funcs
,