// SPDX-License-Identifier: GPL-2.0-only
/* Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"

#define DPU_DEBUG_VIDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->parent ? \
		(e)->parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_VIDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->parent ? \
		(e)->parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_vid(x) \
	container_of(x, struct dpu_encoder_phys_vid, base)

static bool dpu_encoder_phys_vid_is_master(
		struct dpu_encoder_phys *phys_enc)
{
	bool ret = false;

	if (phys_enc->split_role != ENC_ROLE_SLAVE)
		ret = true;

	return ret;
}

static void drm_mode_to_intf_timing_params(
		const struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct intf_timing_params *timing)
{
	memset(timing, 0, sizeof(*timing));

	if ((mode->htotal < mode->hsync_end)
	    || (mode->hsync_start < mode->hdisplay)
	    || (mode->vtotal < mode->vsync_end)
	    || (mode->vsync_start < mode->vdisplay)
	    || (mode->hsync_end < mode->hsync_start)
	    || (mode->vsync_end < mode->vsync_start)) {
		DPU_ERROR("invalid params - hstart:%d,hend:%d,htot:%d,hdisplay:%d\n",
			  mode->hsync_start, mode->hsync_end,
			  mode->htotal, mode->hdisplay);
		DPU_ERROR("vstart:%d,vend:%d,vtot:%d,vdisplay:%d\n",
			  mode->vsync_start, mode->vsync_end,
			  mode->vtotal, mode->vdisplay);
		return;
	}

	/*
	 * https://www.kernel.org/doc/htmldocs/drm/ch02s05.html
	 *  Active Region      Front Porch   Sync   Back Porch
	 * <-----------------><------------><-----><----------->
	 * <- [hv]display --->
	 * <--------- [hv]sync_start ------>
	 * <----------------- [hv]sync_end ------->
	 * <---------------------------- [hv]total ------------->
	 */
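	/*
	 * Worked example, using the standard CEA-861 1920x1080@60 timings
	 * (hdisplay 1920, hsync_start 2008, hsync_end 2052, htotal 2200):
	 * h_front_porch = 2008 - 1920 = 88, hsync_pulse_width = 2052 - 2008
	 * = 44, h_back_porch = 2200 - 2052 = 148. The vertical fields below
	 * follow the same pattern from vsync_start/vsync_end/vtotal.
	 */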
	timing->width = mode->hdisplay;	/* active width */
	timing->height = mode->vdisplay;	/* active height */
	timing->xres = timing->width;
	timing->yres = timing->height;
	timing->h_back_porch = mode->htotal - mode->hsync_end;
	timing->h_front_porch = mode->hsync_start - mode->hdisplay;
	timing->v_back_porch = mode->vtotal - mode->vsync_end;
	timing->v_front_porch = mode->vsync_start - mode->vdisplay;
	timing->hsync_pulse_width = mode->hsync_end - mode->hsync_start;
	timing->vsync_pulse_width = mode->vsync_end - mode->vsync_start;
	timing->hsync_polarity = (mode->flags & DRM_MODE_FLAG_NHSYNC) ? 1 : 0;
	timing->vsync_polarity = (mode->flags & DRM_MODE_FLAG_NVSYNC) ? 1 : 0;
	timing->border_clr = 0;
	timing->underflow_clr = 0xff;
	timing->hsync_skew = mode->hskew;

	/* DSI controller cannot handle active-low sync signals. */
	if (phys_enc->hw_intf->cap->type == INTF_DSI) {
		timing->hsync_polarity = 0;
		timing->vsync_polarity = 0;
	}

	/*
	 * For edp only:
	 * DISPLAY_V_START = (VBP * HCYCLE) + HBP
	 * DISPLAY_V_END = (VBP + VACTIVE) * HCYCLE - 1 - HFP
	 */
	/*
	 * if (vid_enc->hw->cap->type == INTF_EDP) {
	 * display_v_start += mode->htotal - mode->hsync_start;
	 * display_v_end -= mode->hsync_start - mode->hdisplay;
	 * }
	 */
}

static u32 get_horizontal_total(const struct intf_timing_params *timing)
{
	u32 active = timing->xres;
	u32 inactive =
	    timing->h_back_porch + timing->h_front_porch +
	    timing->hsync_pulse_width;
	return active + inactive;
}

static u32 get_vertical_total(const struct intf_timing_params *timing)
{
	u32 active = timing->yres;
	u32 inactive =
	    timing->v_back_porch + timing->v_front_porch +
	    timing->vsync_pulse_width;
	return active + inactive;
}

/*
 * programmable_fetch_get_num_lines:
 *	Number of fetch lines in vertical front porch
 * @timing: Pointer to the intf timing information for the requested mode
 *
 * Returns the number of fetch lines in vertical front porch at which mdp
 * can start fetching the next frame.
 *
 * Number of needed prefetch lines is anything that cannot be absorbed in the
 * start of frame time (back porch + vsync pulse width).
 *
 * Some panels have very large VFP, however we only need a total number of
 * lines based on the chip worst case latencies.
 */
static u32 programmable_fetch_get_num_lines(
		struct dpu_encoder_phys *phys_enc,
		const struct intf_timing_params *timing)
{
	u32 worst_case_needed_lines =
	    phys_enc->hw_intf->cap->prog_fetch_lines_worst_case;
	u32 start_of_frame_lines =
	    timing->v_back_porch + timing->vsync_pulse_width;
	u32 needed_vfp_lines = worst_case_needed_lines - start_of_frame_lines;
	u32 actual_vfp_lines = 0;

	/* Fetch must be outside active lines, otherwise undefined. */
	if (start_of_frame_lines >= worst_case_needed_lines) {
		DPU_DEBUG_VIDENC(phys_enc,
				"prog fetch is not needed, large vbp+vsw\n");
		actual_vfp_lines = 0;
	} else if (timing->v_front_porch < needed_vfp_lines) {
		/* Warn fetch needed, but not enough porch in panel config */
		pr_warn_once("low vbp+vfp may lead to perf issues in some cases\n");
		DPU_DEBUG_VIDENC(phys_enc,
				"less vfp than fetch req, using entire vfp\n");
		actual_vfp_lines = timing->v_front_porch;
	} else {
		DPU_DEBUG_VIDENC(phys_enc, "room in vfp for needed prefetch\n");
		actual_vfp_lines = needed_vfp_lines;
	}

	DPU_DEBUG_VIDENC(phys_enc,
		"v_front_porch %u v_back_porch %u vsync_pulse_width %u\n",
		timing->v_front_porch, timing->v_back_porch,
		timing->vsync_pulse_width);
	DPU_DEBUG_VIDENC(phys_enc,
		"wc_lines %u needed_vfp_lines %u actual_vfp_lines %u\n",
		worst_case_needed_lines, needed_vfp_lines, actual_vfp_lines);

	return actual_vfp_lines;
}
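
/*
 * Worked example with illustrative numbers (not taken from any specific
 * chip or panel): if prog_fetch_lines_worst_case = 25 and the mode has
 * v_back_porch + vsync_pulse_width = 9, fetching must begin 25 - 9 = 16
 * lines into the vertical front porch. A mode with only 10 VFP lines
 * would fall into the middle branch above and use the entire VFP.
 */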

/*
 * programmable_fetch_config: Programs HW to prefetch lines by offsetting
 *	the start of fetch into the vertical front porch for cases where the
 *	vsync pulse width and vertical back porch time is insufficient
 *
 *	Gets # of lines to pre-fetch, then calculate VSYNC counter value.
 *	HW layer requires VSYNC counter of first pixel of tgt VFP line.
 *
 * @timing: Pointer to the intf timing information for the requested mode
 */
static void programmable_fetch_config(struct dpu_encoder_phys *phys_enc,
				      const struct intf_timing_params *timing)
{
	struct intf_prog_fetch f = { 0 };
	u32 vfp_fetch_lines = 0;
	u32 horiz_total = 0;
	u32 vert_total = 0;
	u32 vfp_fetch_start_vsync_counter = 0;
	unsigned long lock_flags;

	if (WARN_ON_ONCE(!phys_enc->hw_intf->ops.setup_prg_fetch))
		return;

	vfp_fetch_lines = programmable_fetch_get_num_lines(phys_enc, timing);
	if (vfp_fetch_lines) {
		vert_total = get_vertical_total(timing);
		horiz_total = get_horizontal_total(timing);
		vfp_fetch_start_vsync_counter =
		    (vert_total - vfp_fetch_lines) * horiz_total + 1;
		f.enable = 1;
		f.fetch_start = vfp_fetch_start_vsync_counter;
	}

	DPU_DEBUG_VIDENC(phys_enc,
		"vfp_fetch_lines %u vfp_fetch_start_vsync_counter %u\n",
		vfp_fetch_lines, vfp_fetch_start_vsync_counter);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	phys_enc->hw_intf->ops.setup_prg_fetch(phys_enc->hw_intf, &f);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
}
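
/*
 * Note on units, as implied by the math above: fetch_start counts pixel
 * clocks from the start of the frame, so a raster of 1125 total lines,
 * each 2200 clocks wide, with 16 prefetch lines programs fetch_start =
 * (1125 - 16) * 2200 + 1, the first pixel of the target VFP line
 * (illustrative numbers only).
 */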

static bool dpu_encoder_phys_vid_mode_fixup(
		struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	DPU_DEBUG_VIDENC(phys_enc, "\n");

	/*
	 * Modifying mode has consequences when the mode comes back to us
	 */
	return true;
}

static void dpu_encoder_phys_vid_setup_timing_engine(
		struct dpu_encoder_phys *phys_enc)
{
	struct drm_display_mode mode;
	struct intf_timing_params timing_params = { 0 };
	const struct dpu_format *fmt = NULL;
	u32 fmt_fourcc = DRM_FORMAT_RGB888;
	unsigned long lock_flags;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	if (!phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid encoder %d\n", phys_enc != 0);
		return;
	}

	mode = phys_enc->cached_mode;
	if (!phys_enc->hw_intf->ops.setup_timing_gen) {
		DPU_ERROR("timing engine setup is not supported\n");
		return;
	}

	DPU_DEBUG_VIDENC(phys_enc, "enabling mode:\n");
	drm_mode_debug_printmodeline(&mode);

	if (phys_enc->split_role != ENC_ROLE_SOLO) {
		mode.hdisplay >>= 1;
		mode.htotal >>= 1;
		mode.hsync_start >>= 1;
		mode.hsync_end >>= 1;

		DPU_DEBUG_VIDENC(phys_enc,
			"split_role %d, halve horizontal %d %d %d %d\n",
			phys_enc->split_role,
			mode.hdisplay, mode.htotal,
			mode.hsync_start, mode.hsync_end);
	}
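
	/*
	 * Example: a 3840-wide mode split across two interfaces leaves each
	 * timing engine programmed 1920 wide; halving all four horizontal
	 * parameters keeps the porches and sync width in proportion.
	 */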

	drm_mode_to_intf_timing_params(phys_enc, &mode, &timing_params);

	fmt = dpu_get_dpu_format(fmt_fourcc);
	DPU_DEBUG_VIDENC(phys_enc, "fmt_fourcc 0x%X\n", fmt_fourcc);

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_VID;
	intf_cfg.stream_sel = 0; /* Don't care value for video mode */
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	phys_enc->hw_intf->ops.setup_timing_gen(phys_enc->hw_intf,
			&timing_params, fmt);
	phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				true,
				phys_enc->hw_pp->idx);

	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	programmable_fetch_config(phys_enc, &timing_params);
}

static void dpu_encoder_phys_vid_vblank_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_hw_ctl *hw_ctl;
	unsigned long lock_flags;
	u32 flush_register = 0;
	int new_cnt = -1, old_cnt = -1;

	hw_ctl = phys_enc->hw_ctl;

	DPU_ATRACE_BEGIN("vblank_irq");

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	old_cnt = atomic_read(&phys_enc->pending_kickoff_cnt);

	/*
	 * only decrement the pending flush count if we've actually flushed
	 * hardware. due to sw irq latency, vblank may have already happened
	 * so we need to double-check with hw that it accepted the flush bits
	 */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	if (hw_ctl->ops.get_flush_register)
		flush_register = hw_ctl->ops.get_flush_register(hw_ctl);

	if (!(flush_register & hw_ctl->ops.get_pending_flush(hw_ctl)))
		new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt,
				-1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);

	phys_enc->parent_ops->handle_frame_done(phys_enc->parent, phys_enc,
			DPU_ENCODER_FRAME_EVENT_DONE);

	DPU_ATRACE_END("vblank_irq");
}

static void dpu_encoder_phys_vid_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
			phys_enc);
}
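
/*
 * In split configurations both interfaces are driven from the same CTL
 * block and hardware flushes only the master, so any non-solo role
 * reports that it needs the shared single-flush handling.
 */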

static bool dpu_encoder_phys_vid_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	return phys_enc->split_role != ENC_ROLE_SOLO;
}

static void _dpu_encoder_phys_vid_setup_irq_hw_idx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_irq *irq;

	/*
	 * Initialize irq->hw_idx only when irq is not registered.
	 * Prevent invalidating irq->irq_idx as modeset may be
	 * called many times during dfps.
	 */

	irq = &phys_enc->irq[INTR_IDX_VSYNC];
	if (irq->irq_idx < 0)
		irq->hw_idx = phys_enc->intf_idx;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	if (irq->irq_idx < 0)
		irq->hw_idx = phys_enc->intf_idx;
}

static void dpu_encoder_phys_vid_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	if (adj_mode) {
		phys_enc->cached_mode = *adj_mode;
		drm_mode_debug_printmodeline(adj_mode);
		DPU_DEBUG_VIDENC(phys_enc, "caching mode:\n");
	}

	_dpu_encoder_phys_vid_setup_irq_hw_idx(phys_enc);
}

static int dpu_encoder_phys_vid_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_vid_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u enable=%d/%d\n", DRMID(phys_enc->parent), enable,
		      atomic_read(&phys_enc->vblank_refcount));

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_VSYNC);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_VSYNC);

end:
	if (ret) {
		DRM_ERROR("failed: id:%u intf:%d ret:%d enable:%d refcnt:%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_intf->idx - INTF_0, ret, enable,
			  refcount);
	}

	return ret;
}

static void dpu_encoder_phys_vid_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;
	u32 flush_mask = 0;
	u32 intf_flush_mask = 0;

	ctl = phys_enc->hw_ctl;

	DPU_DEBUG_VIDENC(phys_enc, "\n");

	if (WARN_ON(!phys_enc->hw_intf->ops.enable_timing))
		return;

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	dpu_encoder_phys_vid_setup_timing_engine(phys_enc);

	/*
	 * For single flush cases (dual-ctl or pp-split), skip setting the
	 * flush bit for the slave intf, since both intfs use same ctl
	 * and HW will only flush the master.
	 */
	if (dpu_encoder_phys_vid_needs_single_flush(phys_enc) &&
		!dpu_encoder_phys_vid_is_master(phys_enc))
		goto skip_flush;

	ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->hw_intf->idx);
	ctl->ops.update_pending_flush(ctl, flush_mask);

	if (ctl->ops.get_bitmask_active_intf)
		ctl->ops.get_bitmask_active_intf(ctl, &intf_flush_mask,
			phys_enc->hw_intf->idx);

	if (ctl->ops.update_pending_intf_flush)
		ctl->ops.update_pending_intf_flush(ctl, intf_flush_mask);

skip_flush:
	DPU_DEBUG_VIDENC(phys_enc,
		"update pending flush ctl %d flush_mask 0x%x intf_mask 0x%x\n",
		ctl->idx - CTL_0, flush_mask, intf_flush_mask);

	/* ctl_flush & timing engine enable will be triggered by framework */
	if (phys_enc->enable_state == DPU_ENC_DISABLED)
		phys_enc->enable_state = DPU_ENC_ENABLING;
}

static void dpu_encoder_phys_vid_destroy(struct dpu_encoder_phys *phys_enc)
{
	DPU_DEBUG_VIDENC(phys_enc, "\n");
	kfree(phys_enc);
}

static void dpu_encoder_phys_vid_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res)
{
	hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_VIDEO;
}

static int dpu_encoder_phys_vid_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	if (!dpu_encoder_phys_vid_is_master(phys_enc)) {
		return 0;
	}

	/* Wait for kickoff to complete */
	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_VSYNC,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		dpu_encoder_helper_report_irq_timeout(phys_enc, INTR_IDX_VSYNC);
	}

	return ret;
}
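
/*
 * For video mode, "commit done" means the CTL flush has been consumed by
 * hardware at a vsync boundary, so completion is detected by waiting for
 * the flush register to clear rather than via a dedicated interrupt.
 */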

static int dpu_encoder_phys_vid_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *hw_ctl = phys_enc->hw_ctl;
	int ret;

	if (!hw_ctl)
		return 0;

	ret = wait_event_timeout(phys_enc->pending_kickoff_wq,
		(hw_ctl->ops.get_flush_register(hw_ctl) == 0),
		msecs_to_jiffies(50));
	if (ret <= 0) {
		DPU_ERROR("vblank timeout\n");
		return -ETIMEDOUT;
	}

	return 0;
}

static void dpu_encoder_phys_vid_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;
	int rc;

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.wait_reset_status)
		return;

	/*
	 * hw supports hardware initiated ctl reset, so before we kickoff a new
	 * frame, need to check and wait for hw initiated ctl reset completion
	 */
	rc = ctl->ops.wait_reset_status(ctl);
	if (rc) {
		DPU_ERROR_VIDENC(phys_enc, "ctl %d reset failure: %d\n",
				ctl->idx, rc);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_VSYNC);
	}
}

static void dpu_encoder_phys_vid_disable(struct dpu_encoder_phys *phys_enc)
{
	unsigned long lock_flags;
	int ret;

	if (!phys_enc->parent || !phys_enc->parent->dev) {
		DPU_ERROR("invalid encoder/device\n");
		return;
	}

	if (!phys_enc->hw_intf) {
		DPU_ERROR("invalid hw_intf %d hw_ctl %d\n",
				phys_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	if (WARN_ON(!phys_enc->hw_intf->ops.enable_timing))
		return;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR("already disabled\n");
		return;
	}

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	phys_enc->hw_intf->ops.enable_timing(phys_enc->hw_intf, 0);
	if (dpu_encoder_phys_vid_is_master(phys_enc))
		dpu_encoder_phys_inc_pending(phys_enc);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/*
	 * Wait for a vsync so we know the ENABLE=0 latched before
	 * the (connector) source of the vsync's gets disabled,
	 * otherwise we end up in a funny state if we re-enable
	 * before the disable latches, which results that some of
	 * the settings changes for the new modeset (like new
	 * scanout buffer) don't latch properly..
	 */
	if (dpu_encoder_phys_vid_is_master(phys_enc)) {
		ret = dpu_encoder_phys_vid_wait_for_vblank(phys_enc);
		if (ret) {
			atomic_set(&phys_enc->pending_kickoff_cnt, 0);
			DRM_ERROR("wait disable failed: id:%u intf:%d ret:%d\n",
				  DRMID(phys_enc->parent),
				  phys_enc->hw_intf->idx - INTF_0, ret);
		}
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_vid_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	unsigned long lock_flags;

	/*
	 * Video mode must flush CTL before enabling timing engine
	 * Video encoders need to turn on their interfaces now
	 */
	if (phys_enc->enable_state == DPU_ENC_ENABLING) {
		trace_dpu_enc_phys_vid_post_kickoff(DRMID(phys_enc->parent),
				    phys_enc->hw_intf->idx - INTF_0);
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		phys_enc->hw_intf->ops.enable_timing(phys_enc->hw_intf, 1);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
		phys_enc->enable_state = DPU_ENC_ENABLED;
	}
}

static void dpu_encoder_phys_vid_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret;

	trace_dpu_enc_phys_vid_irq_ctrl(DRMID(phys_enc->parent),
			    phys_enc->hw_intf->idx - INTF_0,
			    enable,
			    atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		ret = dpu_encoder_phys_vid_control_vblank_irq(phys_enc, true);
		if (ret)
			return;

		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
	} else {
		dpu_encoder_phys_vid_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
	}
}

static int dpu_encoder_phys_vid_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	if (!dpu_encoder_phys_vid_is_master(phys_enc))
		return -EINVAL;

	if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.get_line_count)
		return -EINVAL;

	return phys_enc->hw_intf->ops.get_line_count(phys_enc->hw_intf);
}

static void dpu_encoder_phys_vid_init_ops(struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_vid_is_master;
	ops->mode_set = dpu_encoder_phys_vid_mode_set;
	ops->mode_fixup = dpu_encoder_phys_vid_mode_fixup;
	ops->enable = dpu_encoder_phys_vid_enable;
	ops->disable = dpu_encoder_phys_vid_disable;
	ops->destroy = dpu_encoder_phys_vid_destroy;
	ops->get_hw_resources = dpu_encoder_phys_vid_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_vid_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_vid_wait_for_commit_done;
	ops->wait_for_vblank = dpu_encoder_phys_vid_wait_for_vblank;
	ops->wait_for_tx_complete = dpu_encoder_phys_vid_wait_for_vblank;
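	/*
	 * Video mode has no discrete "transfer complete" event the way
	 * command mode does, so the vblank wait doubles as tx-complete.
	 */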
	ops->irq_control = dpu_encoder_phys_vid_irq_control;
	ops->prepare_for_kickoff = dpu_encoder_phys_vid_prepare_for_kickoff;
	ops->handle_post_kickoff = dpu_encoder_phys_vid_handle_post_kickoff;
	ops->needs_single_flush = dpu_encoder_phys_vid_needs_single_flush;
	ops->get_line_count = dpu_encoder_phys_vid_get_line_count;
}

struct dpu_encoder_phys *dpu_encoder_phys_vid_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_irq *irq;
	int i, ret = 0;

	if (!p) {
		ret = -EINVAL;
		goto fail;
	}

	phys_enc = kzalloc(sizeof(*phys_enc), GFP_KERNEL);
	if (!phys_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	DPU_DEBUG_VIDENC(phys_enc, "\n");

	dpu_encoder_phys_vid_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_VIDEO;
	phys_enc->enc_spinlock = p->enc_spinlock;
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->hw_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_VSYNC];
	irq->name = "vsync_irq";
	irq->intr_type = DPU_IRQ_TYPE_INTF_VSYNC;
	irq->intr_idx = INTR_IDX_VSYNC;
	irq->cb.func = dpu_encoder_phys_vid_vblank_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_vid_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	phys_enc->enable_state = DPU_ENC_DISABLED;

	DPU_DEBUG_VIDENC(phys_enc, "created intf idx:%d\n", p->intf_idx);

	return phys_enc;

fail:
	DPU_ERROR("failed to create encoder\n");
	if (phys_enc)
		dpu_encoder_phys_vid_destroy(phys_enc);

	return ERR_PTR(ret);
}