// SPDX-License-Identifier: GPL-2.0+

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#include "vkms_drv.h"
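/*
 * vkms has no scanout hardware, so vblanks are simulated with an hrtimer.
 * The callback below stands in for a vblank interrupt handler: it rolls the
 * timer forward by one frame period, lets the DRM vblank core process the
 * event and, when the composer is enabled, records the frame interval in the
 * current composer state and queues vkms_composer_worker() (implemented
 * elsewhere in the driver) so the compositing/CRC work runs outside timer
 * context.
 */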
static enum hrtimer_restart vkms_vblank_simulate(struct hrtimer *timer)
{
	struct vkms_output *output = container_of(timer, struct vkms_output,
						  vblank_hrtimer);
	struct drm_crtc *crtc = &output->crtc;
	struct vkms_crtc_state *state;
	u64 ret_overrun;
	bool ret;

	ret_overrun = hrtimer_forward_now(&output->vblank_hrtimer,
					  output->period_ns);
	WARN_ON(ret_overrun != 1);

	spin_lock(&output->lock);
	ret = drm_crtc_handle_vblank(crtc);
	if (!ret)
		DRM_ERROR("vkms failure on handling vblank");

	state = output->composer_state;
	spin_unlock(&output->lock);

	if (state && output->composer_enabled) {
		u64 frame = drm_crtc_accurate_vblank_count(crtc);

		/* update frame_start only if a queued vkms_composer_worker()
		 * has read the data
		 */
		spin_lock(&output->composer_lock);
		if (!state->crc_pending)
			state->frame_start = frame;
		else
			DRM_DEBUG_DRIVER("crc worker falling behind, frame_start: %llu, frame_end: %llu\n",
					 state->frame_start, frame);
		state->frame_end = frame;
		state->crc_pending = true;
		spin_unlock(&output->composer_lock);

		ret = queue_work(output->composer_workq, &state->composer_work);
		if (!ret)
			DRM_DEBUG_DRIVER("Composer worker already queued\n");
	}

	return HRTIMER_RESTART;
}
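/*
 * Arm the vblank simulation: the hrtimer period is taken from the current
 * mode's frame duration (vblank->framedur_ns), so the simulated vblank fires
 * at the refresh rate of the configured mode.
 */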
static int vkms_enable_vblank(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	unsigned int pipe = drm_crtc_index(crtc);
	struct drm_vblank_crtc *vblank = &dev->vblank[pipe];
	struct vkms_output *out = drm_crtc_to_vkms_output(crtc);

	drm_calc_timestamping_constants(crtc, &crtc->mode);

	hrtimer_init(&out->vblank_hrtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	out->vblank_hrtimer.function = &vkms_vblank_simulate;
	out->period_ns = ktime_set(0, vblank->framedur_ns);
	hrtimer_start(&out->vblank_hrtimer, out->period_ns, HRTIMER_MODE_REL);

	return 0;
}
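/* Disabling vblanks simply cancels the simulation timer. */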
static void vkms_disable_vblank(struct drm_crtc *crtc)
{
	struct vkms_output *out = drm_crtc_to_vkms_output(crtc);

	hrtimer_cancel(&out->vblank_hrtimer);
}
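/*
 * With no hardware counter to sample, the vblank timestamp is reconstructed
 * from the hrtimer: the pending expiry of vblank_hrtimer marks the end of the
 * current frame, and one period is subtracted (see below) to report the
 * timestamp of the vblank that was just handled.
 */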
static bool vkms_get_vblank_timestamp(struct drm_crtc *crtc,
				      int *max_error, ktime_t *vblank_time,
				      bool in_vblank_irq)
{
	struct drm_device *dev = crtc->dev;
	unsigned int pipe = crtc->index;
	struct vkms_device *vkmsdev = drm_device_to_vkms_device(dev);
	struct vkms_output *output = &vkmsdev->output;
	struct drm_vblank_crtc *vblank = &dev->vblank[pipe];

	if (!READ_ONCE(vblank->enabled)) {
		*vblank_time = ktime_get();
		return true;
	}

	*vblank_time = READ_ONCE(output->vblank_hrtimer.node.expires);

	if (WARN_ON(*vblank_time == vblank->time))
		return true;

	/*
	 * To prevent races we roll the hrtimer forward before we do any
	 * interrupt processing - this is how real hw works (the interrupt is
	 * only generated after all the vblank registers are updated) and what
	 * the vblank core expects. Therefore we need to always correct the
	 * timestamp by one frame.
	 */
	*vblank_time -= output->period_ns;

	return true;
}
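/*
 * vkms wraps drm_crtc_state in vkms_crtc_state, so the duplicate/destroy/
 * reset hooks below pair the __drm_atomic_helper_crtc_*() helpers with
 * management of the embedded composer_work and the active_planes array
 * filled in by vkms_crtc_atomic_check().
 */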
static struct drm_crtc_state *
vkms_atomic_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct vkms_crtc_state *vkms_state;

	if (WARN_ON(!crtc->state))
		return NULL;

	vkms_state = kzalloc(sizeof(*vkms_state), GFP_KERNEL);
	if (!vkms_state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &vkms_state->base);

	INIT_WORK(&vkms_state->composer_work, vkms_composer_worker);

	return &vkms_state->base;
}
static void vkms_atomic_crtc_destroy_state(struct drm_crtc *crtc,
					   struct drm_crtc_state *state)
{
	struct vkms_crtc_state *vkms_state = to_vkms_crtc_state(state);

	__drm_atomic_helper_crtc_destroy_state(state);

	WARN_ON(work_pending(&vkms_state->composer_work));
	kfree(vkms_state->active_planes);
	kfree(vkms_state);
}
static void vkms_atomic_crtc_reset(struct drm_crtc *crtc)
{
	struct vkms_crtc_state *vkms_state =
		kzalloc(sizeof(*vkms_state), GFP_KERNEL);

	if (crtc->state)
		vkms_atomic_crtc_destroy_state(crtc, crtc->state);

	__drm_atomic_helper_crtc_reset(crtc, &vkms_state->base);
	if (vkms_state)
		INIT_WORK(&vkms_state->composer_work, vkms_composer_worker);
}
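/*
 * The CRC source hooks (vkms_get_crc_sources and friends) are provided by
 * the composer side of the driver; they expose the composed frame's CRC via
 * DRM's debugfs CRC interface.
 */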
static const struct drm_crtc_funcs vkms_crtc_funcs = {
	.set_config             = drm_atomic_helper_set_config,
	.destroy                = drm_crtc_cleanup,
	.page_flip              = drm_atomic_helper_page_flip,
	.reset                  = vkms_atomic_crtc_reset,
	.atomic_duplicate_state = vkms_atomic_crtc_duplicate_state,
	.atomic_destroy_state   = vkms_atomic_crtc_destroy_state,
	.enable_vblank          = vkms_enable_vblank,
	.disable_vblank         = vkms_disable_vblank,
	.get_vblank_timestamp   = vkms_get_vblank_timestamp,
	.get_crc_sources        = vkms_get_crc_sources,
	.set_crc_source         = vkms_set_crc_source,
	.verify_crc_source      = vkms_verify_crc_source,
};
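/*
 * atomic_check collects the planes that will actually be visible on this
 * CRTC into vkms_state->active_planes (a first pass counts them, a second
 * pass stores them), so later compositing can work from that set without
 * walking the plane mask again.
 */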
static int vkms_crtc_atomic_check(struct drm_crtc *crtc,
				  struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
									   crtc);
	struct vkms_crtc_state *vkms_state = to_vkms_crtc_state(crtc_state);
	struct drm_plane *plane;
	struct drm_plane_state *plane_state;
	int i = 0, ret;

	if (vkms_state->active_planes)
		return 0;

	ret = drm_atomic_add_affected_planes(crtc_state->state, crtc);
	if (ret < 0)
		return ret;

	drm_for_each_plane_mask(plane, crtc->dev, crtc_state->plane_mask) {
		plane_state = drm_atomic_get_existing_plane_state(crtc_state->state,
								  plane);
		WARN_ON(!plane_state);

		if (!plane_state->visible)
			continue;

		i++;
	}

	vkms_state->active_planes = kcalloc(i, sizeof(plane), GFP_KERNEL);
	if (!vkms_state->active_planes)
		return -ENOMEM;
	vkms_state->num_active_planes = i;

	i = 0;
	drm_for_each_plane_mask(plane, crtc->dev, crtc_state->plane_mask) {
		plane_state = drm_atomic_get_existing_plane_state(crtc_state->state,
								  plane);

		if (!plane_state->visible)
			continue;

		vkms_state->active_planes[i++] =
			to_vkms_plane_state(plane_state);
	}

	return 0;
}
static void vkms_crtc_atomic_enable(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}
static void vkms_crtc_atomic_disable(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	drm_crtc_vblank_off(crtc);
}
static void vkms_crtc_atomic_begin(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct vkms_output *vkms_output = drm_crtc_to_vkms_output(crtc);

	/* This lock is held across the atomic commit to block the vblank timer
	 * from scheduling vkms_composer_worker until the composer is updated
	 */
	spin_lock_irq(&vkms_output->lock);
}
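/*
 * atomic_flush completes the commit started in atomic_begin: any pending
 * pageflip event is either sent immediately (when a vblank reference cannot
 * be taken) or armed to fire on the next simulated vblank, the new state is
 * published as composer_state for the timer callback to pick up, and the
 * output lock taken in atomic_begin is released.
 */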
static void vkms_crtc_atomic_flush(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct vkms_output *vkms_output = drm_crtc_to_vkms_output(crtc);

	if (crtc->state->event) {
		spin_lock(&crtc->dev->event_lock);

		if (drm_crtc_vblank_get(crtc) != 0)
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
		else
			drm_crtc_arm_vblank_event(crtc, crtc->state->event);

		spin_unlock(&crtc->dev->event_lock);

		crtc->state->event = NULL;
	}

	vkms_output->composer_state = to_vkms_crtc_state(crtc->state);

	spin_unlock_irq(&vkms_output->lock);
}
static const struct drm_crtc_helper_funcs vkms_crtc_helper_funcs = {
	.atomic_check	= vkms_crtc_atomic_check,
	.atomic_begin	= vkms_crtc_atomic_begin,
	.atomic_flush	= vkms_crtc_atomic_flush,
	.atomic_enable	= vkms_crtc_atomic_enable,
	.atomic_disable	= vkms_crtc_atomic_disable,
};
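/*
 * vkms_crtc_init() wires the CRTC to its primary/cursor planes, installs the
 * func tables above and creates the ordered workqueue that serializes
 * composer work items, so frames are composed in submission order.
 */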
int vkms_crtc_init(struct drm_device *dev, struct drm_crtc *crtc,
		   struct drm_plane *primary, struct drm_plane *cursor)
{
	struct vkms_output *vkms_out = drm_crtc_to_vkms_output(crtc);
	int ret;

	ret = drm_crtc_init_with_planes(dev, crtc, primary, cursor,
					&vkms_crtc_funcs, NULL);
	if (ret) {
		DRM_ERROR("Failed to init CRTC\n");
		return ret;
	}

	drm_crtc_helper_add(crtc, &vkms_crtc_helper_funcs);

	spin_lock_init(&vkms_out->lock);
	spin_lock_init(&vkms_out->composer_lock);

	vkms_out->composer_workq = alloc_ordered_workqueue("vkms_composer", 0);
	if (!vkms_out->composer_workq)