/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state);
/**
 * dce_virtual_vblank_wait - vblank wait asic callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
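{
	/* the virtual display has no real vblank to wait on */
	return;
}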
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
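{
	/* no hardware frame counter; always report 0 */
	return 0;
}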
static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
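{
	/* no scanout hardware to reprogram; flips complete from the vblank timer */
	return;
}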
static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
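{
	/* no real scanout position to report */
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}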
static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
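{
	/* the virtual connector is always considered connected */
	return true;
}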
static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
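{
	/* no hpd hardware to configure */
	return;
}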
static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
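{
	/* no hpd gpio register on the virtual display */
	return 0;
}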
/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * Calculate and program the display watermarks and line
 * buffer allocation (CIK).
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
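{
	/* no watermarks or line buffer to program for the virtual display */
	return;
}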
static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
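{
	/* no gamma hardware; accept and ignore the table */
	return 0;
}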
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}
static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_crtc_page_flip_target,
};
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	default:
		break;
	}
}
static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}
static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct amdgpu_framebuffer *amdgpu_fb;
		struct amdgpu_bo *abo;

		amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
		abo = gem_to_amdgpu_bo(amdgpu_fb->obj);
		r = amdgpu_bo_reserve(abo, true);
		if (unlikely(r))
			DRM_ERROR("failed to reserve abo before unpin\n");
		else {
			amdgpu_bo_unpin(abo);
			amdgpu_bo_unreserve(abo);
		}
	}

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}
static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw version for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}
static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
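{
	/* nothing to fix up for the virtual crtc */
	return true;
}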
static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
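{
	/* no scanout base to program */
	return 0;
}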
static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
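{
	/* no scanout base to program */
	return 0;
}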
static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
};
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}
static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}
static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	int enc_id = connector->encoder_ids[0];
	struct drm_encoder *encoder;
	int i;

	for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) {
		if (connector->encoder_ids[i] == 0)
			break;

		encoder = drm_encoder_find(connector->dev, NULL, connector->encoder_ids[i]);
		if (!encoder)
			continue;

		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	if (enc_id)
		return drm_encoder_find(connector->dev, NULL, enc_id);
	return NULL;
}
static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[17] = {
		{ 640,  480},
		{ 720,  480},
		{ 800,  600},
		{ 848,  480},
		{1024,  768},
		{1152,  768},
		{1280,  720},
		{1280,  800},
		{1280,  854},
		{1280,  960},
		{1280, 1024},
		{1440,  900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200}
	};

	for (i = 0; i < 17; i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
		drm_mode_probed_add(connector, mode);
	}

	return 0;
}
static int dce_virtual_mode_valid(struct drm_connector *connector,
				  struct drm_display_mode *mode)
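{
	/* every requested mode is acceptable on the virtual connector */
	return MODE_OK;
}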
static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
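{
	/* no physical output to power up or down */
	return 0;
}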
static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
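{
	/* no connector properties to handle */
	return 0;
}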
static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
static void dce_virtual_force(struct drm_connector *connector)
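{
	/* nothing to force on a virtual connector */
	return;
}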
static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};
static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};
static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, 229, &adev->crtc_irq);
	if (r)
		return r;

	adev->ddev->max_vblank_count = 0;

	adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	adev->ddev->mode_config.preferred_depth = 24;
	adev->ddev->mode_config.prefer_shadow = 1;

	adev->ddev->mode_config.fb_base = adev->mc.aper_base;

	r = amdgpu_modeset_create_props(adev);
	if (r)
		return r;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	/* allocate crtcs, encoders, connectors */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev->ddev);

	adev->mode_info.mode_config_initialized = true;
	return 0;
}
static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	/* clear the crtcs pointers so the dce irq finish routine can't touch freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type);
	}
	return 0;
}
static int dce_virtual_hw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			dce_virtual_set_crtc_vblank_interrupt_state(adev, i, AMDGPU_IRQ_STATE_DISABLE);

	return 0;
}
static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}
static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}
static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};
/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}
static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};
static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	/* add a new encoder */
	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	/* add a new connector */
	drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	drm_connector_register(connector);

	/* link the encoder and connector */
	drm_mode_connector_attach_encoder(connector, encoder);

	return 0;
}
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.vblank_wait = &dce_virtual_vblank_wait,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_connector = NULL,
};
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	if (adev->mode_info.funcs == NULL)
		adev->mode_info.funcs = &dce_virtual_display_funcs;
}
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	/* IRQ could occur when in initial stage */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev->ddev->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
			"AMDGPU_FLIP_SUBMITTED(%d)\n",
			amdgpu_crtc->pflip_status,
			AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
		return 0;
	}

	/* page flip completed. clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wakeup userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	schedule_work(&works->unpin_work);

	return 0;
}
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}
static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
};
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}
const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.funcs = &dce_virtual_ip_funcs,
};