// SPDX-License-Identifier: GPL-2.0-only
/* Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_hw_merge3d.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"
#define DPU_DEBUG_VIDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->parent ? \
		(e)->parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_VIDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->parent ? \
		(e)->parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_vid(x) \
	container_of(x, struct dpu_encoder_phys_vid, base)
static bool dpu_encoder_phys_vid_is_master(
		struct dpu_encoder_phys *phys_enc)
{
	bool ret = false;

	if (phys_enc->split_role != ENC_ROLE_SLAVE)
		ret = true;

	return ret;
}
static void drm_mode_to_intf_timing_params(
		const struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct intf_timing_params *timing)
{
	memset(timing, 0, sizeof(*timing));

	if ((mode->htotal < mode->hsync_end)
			|| (mode->hsync_start < mode->hdisplay)
			|| (mode->vtotal < mode->vsync_end)
			|| (mode->vsync_start < mode->vdisplay)
			|| (mode->hsync_end < mode->hsync_start)
			|| (mode->vsync_end < mode->vsync_start)) {
		DPU_ERROR(
		    "invalid params - hstart:%d,hend:%d,htot:%d,hdisplay:%d\n",
				mode->hsync_start, mode->hsync_end,
				mode->htotal, mode->hdisplay);
		DPU_ERROR("vstart:%d,vend:%d,vtot:%d,vdisplay:%d\n",
				mode->vsync_start, mode->vsync_end,
				mode->vtotal, mode->vdisplay);
		return;
	}
	/*
	 * https://www.kernel.org/doc/htmldocs/drm/ch02s05.html
	 *  Active Region      Front Porch   Sync   Back Porch
	 * <-----------------><------------><-----><----------->
	 * <- [hv]display --->
	 * <--------- [hv]sync_start ------>
	 * <----------------- [hv]sync_end ------->
	 * <---------------------------- [hv]total ------------->
	 */
	timing->width = mode->hdisplay;	/* active width */
	timing->height = mode->vdisplay;	/* active height */
	timing->xres = timing->width;
	timing->yres = timing->height;
	timing->h_back_porch = mode->htotal - mode->hsync_end;
	timing->h_front_porch = mode->hsync_start - mode->hdisplay;
	timing->v_back_porch = mode->vtotal - mode->vsync_end;
	timing->v_front_porch = mode->vsync_start - mode->vdisplay;
	timing->hsync_pulse_width = mode->hsync_end - mode->hsync_start;
	timing->vsync_pulse_width = mode->vsync_end - mode->vsync_start;
	timing->hsync_polarity = (mode->flags & DRM_MODE_FLAG_NHSYNC) ? 1 : 0;
	timing->vsync_polarity = (mode->flags & DRM_MODE_FLAG_NVSYNC) ? 1 : 0;
	timing->border_clr = 0;
	timing->underflow_clr = 0xff;
	timing->hsync_skew = mode->hskew;

	/* DSI controller cannot handle active-low sync signals. */
	if (phys_enc->hw_intf->cap->type == INTF_DSI) {
		timing->hsync_polarity = 0;
		timing->vsync_polarity = 0;
	}
	/*
	 * For edp only:
	 * DISPLAY_V_START = (VBP * HCYCLE) + HBP
	 * DISPLAY_V_END = (VBP + VACTIVE) * HCYCLE - 1 - HFP
	 */
	/*
	 * if (vid_enc->hw->cap->type == INTF_EDP) {
	 * display_v_start += mode->htotal - mode->hsync_start;
	 * display_v_end -= mode->hsync_start - mode->hdisplay;
	 * }
	 */
	/* for DP/EDP, shift timings to align them to the bottom right */
	if ((phys_enc->hw_intf->cap->type == INTF_DP) ||
	    (phys_enc->hw_intf->cap->type == INTF_EDP)) {
		timing->h_back_porch += timing->h_front_porch;
		timing->h_front_porch = 0;
		timing->v_back_porch += timing->v_front_porch;
		timing->v_front_porch = 0;
	}
}
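/*
 * Worked example for the porch math above (illustrative numbers, taken from
 * a typical CEA-style 1080p60 mode): hdisplay 1920, hsync_start 2008,
 * hsync_end 2052, htotal 2200 gives h_front_porch = 2008 - 1920 = 88,
 * hsync_pulse_width = 2052 - 2008 = 44 and h_back_porch = 2200 - 2052 = 148.
 * The vertical fields follow the same pattern.
 */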
static u32 get_horizontal_total(const struct intf_timing_params *timing)
{
	u32 active = timing->xres;
	u32 inactive =
	    timing->h_back_porch + timing->h_front_porch +
	    timing->hsync_pulse_width;
	return active + inactive;
}

static u32 get_vertical_total(const struct intf_timing_params *timing)
{
	u32 active = timing->yres;
	u32 inactive =
	    timing->v_back_porch + timing->v_front_porch +
	    timing->vsync_pulse_width;
	return active + inactive;
}
/*
 * programmable_fetch_get_num_lines:
 *	Number of fetch lines in vertical front porch
 * @timing: Pointer to the intf timing information for the requested mode
 *
 * Returns the number of fetch lines in the vertical front porch at which
 * mdp can start fetching the next frame.
 *
 * The number of needed prefetch lines is anything that cannot be absorbed
 * in the start-of-frame time (back porch + vsync pulse width).
 *
 * Some panels have a very large VFP; however, we only need the number of
 * lines dictated by the chip's worst-case latencies.
 */
static u32 programmable_fetch_get_num_lines(
		struct dpu_encoder_phys *phys_enc,
		const struct intf_timing_params *timing)
{
	u32 worst_case_needed_lines =
	    phys_enc->hw_intf->cap->prog_fetch_lines_worst_case;
	u32 start_of_frame_lines =
	    timing->v_back_porch + timing->vsync_pulse_width;
	u32 needed_vfp_lines = worst_case_needed_lines - start_of_frame_lines;
	u32 actual_vfp_lines = 0;

	/* Fetch must be outside active lines, otherwise undefined. */
	if (start_of_frame_lines >= worst_case_needed_lines) {
		DPU_DEBUG_VIDENC(phys_enc,
				"prog fetch is not needed, large vbp+vsw\n");
		actual_vfp_lines = 0;
	} else if (timing->v_front_porch < needed_vfp_lines) {
		/* Warn: fetch needed, but not enough porch in panel config */
		pr_warn_once("low vbp+vfp may lead to perf issues in some cases\n");
		DPU_DEBUG_VIDENC(phys_enc,
				"less vfp than fetch req, using entire vfp\n");
		actual_vfp_lines = timing->v_front_porch;
	} else {
		DPU_DEBUG_VIDENC(phys_enc, "room in vfp for needed prefetch\n");
		actual_vfp_lines = needed_vfp_lines;
	}

	DPU_DEBUG_VIDENC(phys_enc,
		"v_front_porch %u v_back_porch %u vsync_pulse_width %u\n",
		timing->v_front_porch, timing->v_back_porch,
		timing->vsync_pulse_width);
	DPU_DEBUG_VIDENC(phys_enc,
		"wc_lines %u needed_vfp_lines %u actual_vfp_lines %u\n",
		worst_case_needed_lines, needed_vfp_lines, actual_vfp_lines);

	return actual_vfp_lines;
}
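/*
 * Worked example (illustrative numbers, not tied to any particular panel):
 * with prog_fetch_lines_worst_case = 24, v_back_porch = 4,
 * vsync_pulse_width = 5 and v_front_porch = 36, start_of_frame_lines is
 * 4 + 5 = 9, so needed_vfp_lines = 24 - 9 = 15; since the front porch has
 * room (36 >= 15), actual_vfp_lines = 15.
 */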
/*
 * programmable_fetch_config: Programs HW to prefetch lines by offsetting
 *	the start of fetch into the vertical front porch for cases where the
 *	vsync pulse width and vertical back porch time are insufficient.
 *
 *	Gets the number of lines to prefetch, then calculates the VSYNC
 *	counter value. The HW layer requires the VSYNC counter of the first
 *	pixel of the target VFP line.
 *
 * @timing: Pointer to the intf timing information for the requested mode
 */
static void programmable_fetch_config(struct dpu_encoder_phys *phys_enc,
				      const struct intf_timing_params *timing)
{
	struct intf_prog_fetch f = { 0 };
	u32 vfp_fetch_lines = 0;
	u32 horiz_total = 0;
	u32 vert_total = 0;
	u32 vfp_fetch_start_vsync_counter = 0;
	unsigned long lock_flags;

	if (WARN_ON_ONCE(!phys_enc->hw_intf->ops.setup_prg_fetch))
		return;

	vfp_fetch_lines = programmable_fetch_get_num_lines(phys_enc, timing);
	if (vfp_fetch_lines) {
		vert_total = get_vertical_total(timing);
		horiz_total = get_horizontal_total(timing);
		vfp_fetch_start_vsync_counter =
		    (vert_total - vfp_fetch_lines) * horiz_total + 1;
		f.enable = 1;
		f.fetch_start = vfp_fetch_start_vsync_counter;
	}

	DPU_DEBUG_VIDENC(phys_enc,
		"vfp_fetch_lines %u vfp_fetch_start_vsync_counter %u\n",
		vfp_fetch_lines, vfp_fetch_start_vsync_counter);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	phys_enc->hw_intf->ops.setup_prg_fetch(phys_enc->hw_intf, &f);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
}
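/*
 * Worked example (continuing the illustrative numbers above): with
 * vert_total = 1125, horiz_total = 2200 and vfp_fetch_lines = 15, the
 * fetch trigger is (1125 - 15) * 2200 + 1 = 2442001, i.e. the VSYNC
 * counter value of the first pixel of the target VFP line.
 */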
static bool dpu_encoder_phys_vid_mode_fixup(
		struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	DPU_DEBUG_VIDENC(phys_enc, "\n");

	/*
	 * Modifying mode has consequences when the mode comes back to us
	 */
	return true;
}
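/*
 * Program the INTF timing generator from the cached mode. For split
 * (non-SOLO) roles each interface drives half of the horizontal timing,
 * so the h* fields are halved first. Note the timing engine is only
 * configured here; it is enabled from the post-kickoff path, after the
 * CTL flush has been queued.
 */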
static void dpu_encoder_phys_vid_setup_timing_engine(
		struct dpu_encoder_phys *phys_enc)
{
	struct drm_display_mode mode;
	struct intf_timing_params timing_params = { 0 };
	const struct dpu_format *fmt = NULL;
	u32 fmt_fourcc = DRM_FORMAT_RGB888;
	unsigned long lock_flags;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	if (!phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid encoder %d\n", phys_enc != NULL);
		return;
	}

	mode = phys_enc->cached_mode;
	if (!phys_enc->hw_intf->ops.setup_timing_gen) {
		DPU_ERROR("timing engine setup is not supported\n");
		return;
	}

	DPU_DEBUG_VIDENC(phys_enc, "enabling mode:\n");
	drm_mode_debug_printmodeline(&mode);

	if (phys_enc->split_role != ENC_ROLE_SOLO) {
		mode.hdisplay >>= 1;
		mode.htotal >>= 1;
		mode.hsync_start >>= 1;
		mode.hsync_end >>= 1;

		DPU_DEBUG_VIDENC(phys_enc,
			"split_role %d, halve horizontal %d %d %d %d\n",
			phys_enc->split_role,
			mode.hdisplay, mode.htotal,
			mode.hsync_start, mode.hsync_end);
	}

	drm_mode_to_intf_timing_params(phys_enc, &mode, &timing_params);

	fmt = dpu_get_dpu_format(fmt_fourcc);
	DPU_DEBUG_VIDENC(phys_enc, "fmt_fourcc 0x%X\n", fmt_fourcc);

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_VID;
	intf_cfg.stream_sel = 0; /* Don't-care value for video mode */
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	if (phys_enc->hw_pp->merge_3d)
		intf_cfg.merge_3d = phys_enc->hw_pp->merge_3d->id;

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	phys_enc->hw_intf->ops.setup_timing_gen(phys_enc->hw_intf,
			&timing_params, fmt);
	phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				true,
				phys_enc->hw_pp->idx);

	if (phys_enc->hw_pp->merge_3d) {
		struct dpu_hw_merge_3d *merge_3d =
			to_dpu_hw_merge_3d(phys_enc->hw_pp->merge_3d);

		merge_3d->ops.setup_3d_mode(merge_3d, intf_cfg.mode_3d);
	}

	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	programmable_fetch_config(phys_enc, &timing_params);
}
static void dpu_encoder_phys_vid_vblank_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_hw_ctl *hw_ctl;
	unsigned long lock_flags;
	u32 flush_register = 0;

	hw_ctl = phys_enc->hw_ctl;

	DPU_ATRACE_BEGIN("vblank_irq");

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	atomic_read(&phys_enc->pending_kickoff_cnt);

	/*
	 * only decrement the pending flush count if we've actually flushed
	 * hardware. due to sw irq latency, vblank may have already happened
	 * so we need to double-check with hw that it accepted the flush bits
	 */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	if (hw_ctl->ops.get_flush_register)
		flush_register = hw_ctl->ops.get_flush_register(hw_ctl);

	if (!(flush_register & hw_ctl->ops.get_pending_flush(hw_ctl)))
		atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);

	phys_enc->parent_ops->handle_frame_done(phys_enc->parent, phys_enc,
			DPU_ENCODER_FRAME_EVENT_DONE);

	DPU_ATRACE_END("vblank_irq");
}
static void dpu_encoder_phys_vid_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
				phys_enc);
}
static bool dpu_encoder_phys_vid_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	return phys_enc->split_role != ENC_ROLE_SOLO;
}
static void _dpu_encoder_phys_vid_setup_irq_hw_idx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_irq *irq;

	/*
	 * Initialize irq->hw_idx only when irq is not registered.
	 * Prevent invalidating irq->irq_idx as modeset may be
	 * called many times during dfps.
	 */

	irq = &phys_enc->irq[INTR_IDX_VSYNC];
	if (irq->irq_idx < 0)
		irq->hw_idx = phys_enc->intf_idx;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	if (irq->irq_idx < 0)
		irq->hw_idx = phys_enc->intf_idx;
}
static void dpu_encoder_phys_vid_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	if (adj_mode) {
		phys_enc->cached_mode = *adj_mode;
		drm_mode_debug_printmodeline(adj_mode);
		DPU_DEBUG_VIDENC(phys_enc, "caching mode:\n");
	}

	_dpu_encoder_phys_vid_setup_irq_hw_idx(phys_enc);
}
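/*
 * Reference-counted vblank IRQ control. Only the master encoder reports
 * vblank: the VSYNC IRQ is registered on the first enable and unregistered
 * when the last reference is dropped.
 */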
static int dpu_encoder_phys_vid_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_vid_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u enable=%d/%d\n", DRMID(phys_enc->parent), enable,
		      atomic_read(&phys_enc->vblank_refcount));

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_VSYNC);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_VSYNC);

end:
	if (ret) {
		DRM_ERROR("failed: id:%u intf:%d ret:%d enable:%d refcnt:%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_intf->idx - INTF_0, ret, enable,
			  refcount);
	}
	return ret;
}
static void dpu_encoder_phys_vid_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	ctl = phys_enc->hw_ctl;

	DPU_DEBUG_VIDENC(phys_enc, "\n");

	if (WARN_ON(!phys_enc->hw_intf->ops.enable_timing))
		return;

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	dpu_encoder_phys_vid_setup_timing_engine(phys_enc);

	/*
	 * For single flush cases (dual-ctl or pp-split), skip setting the
	 * flush bit for the slave intf, since both intfs use the same ctl
	 * and HW will only flush the master.
	 */
	if (dpu_encoder_phys_vid_needs_single_flush(phys_enc) &&
		!dpu_encoder_phys_vid_is_master(phys_enc))
		goto skip_flush;

	ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
	if (ctl->ops.update_pending_flush_merge_3d && phys_enc->hw_pp->merge_3d)
		ctl->ops.update_pending_flush_merge_3d(ctl,
				phys_enc->hw_pp->merge_3d->id);

skip_flush:
	DPU_DEBUG_VIDENC(phys_enc,
		"update pending flush ctl %d intf %d\n",
		ctl->idx - CTL_0, phys_enc->hw_intf->idx);

	/* ctl_flush & timing engine enable will be triggered by framework */
	if (phys_enc->enable_state == DPU_ENC_DISABLED)
		phys_enc->enable_state = DPU_ENC_ENABLING;
}
static void dpu_encoder_phys_vid_destroy(struct dpu_encoder_phys *phys_enc)
{
	DPU_DEBUG_VIDENC(phys_enc, "\n");
	kfree(phys_enc);
}
static void dpu_encoder_phys_vid_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res)
{
	hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_VIDEO;
}
static int dpu_encoder_phys_vid_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	if (!dpu_encoder_phys_vid_is_master(phys_enc)) {
		return 0;
	}

	/* Wait for kickoff to complete */
	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_VSYNC,
			&wait_info);

	if (ret == -ETIMEDOUT) {
		dpu_encoder_helper_report_irq_timeout(phys_enc, INTR_IDX_VSYNC);
	}

	return ret;
}
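/*
 * Wait until the CTL flush register clears, i.e. the hardware has latched
 * the flush bits queued for this commit. The wait queue is woken from the
 * vblank IRQ handler above.
 */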
static int dpu_encoder_phys_vid_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *hw_ctl = phys_enc->hw_ctl;
	int ret;

	if (!hw_ctl->ops.get_flush_register)
		return 0;

	ret = wait_event_timeout(phys_enc->pending_kickoff_wq,
		(hw_ctl->ops.get_flush_register(hw_ctl) == 0),
		msecs_to_jiffies(50));
	if (ret <= 0) {
		DPU_ERROR("vblank timeout\n");
		return -ETIMEDOUT;
	}

	return 0;
}
static void dpu_encoder_phys_vid_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;
	int rc;

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.wait_reset_status)
		return;

	/*
	 * hw supports hardware-initiated ctl reset, so before we kick off a
	 * new frame, we need to check and wait for hw-initiated ctl reset
	 * completion
	 */
	rc = ctl->ops.wait_reset_status(ctl);
	if (rc) {
		DPU_ERROR_VIDENC(phys_enc, "ctl %d reset failure: %d\n",
				ctl->idx, rc);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_VSYNC);
	}
}
static void dpu_encoder_phys_vid_disable(struct dpu_encoder_phys *phys_enc)
{
	unsigned long lock_flags;
	int ret;

	if (!phys_enc->parent || !phys_enc->parent->dev) {
		DPU_ERROR("invalid encoder/device\n");
		return;
	}

	if (!phys_enc->hw_intf) {
		DPU_ERROR("invalid hw_intf %d hw_ctl %d\n",
				phys_enc->hw_intf != NULL,
				phys_enc->hw_ctl != NULL);
		return;
	}

	if (WARN_ON(!phys_enc->hw_intf->ops.enable_timing))
		return;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR("already disabled\n");
		return;
	}

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	phys_enc->hw_intf->ops.enable_timing(phys_enc->hw_intf, 0);
	if (dpu_encoder_phys_vid_is_master(phys_enc))
		dpu_encoder_phys_inc_pending(phys_enc);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/*
	 * Wait for a vsync so we know the ENABLE=0 latched before the
	 * (connector) source of the vsync gets disabled. Otherwise we end up
	 * in a funny state if we re-enable before the disable latches, which
	 * results in some of the settings for the new modeset (like the new
	 * scanout buffer) not latching properly.
	 */
	if (dpu_encoder_phys_vid_is_master(phys_enc)) {
		ret = dpu_encoder_phys_vid_wait_for_vblank(phys_enc);
		if (ret) {
			atomic_set(&phys_enc->pending_kickoff_cnt, 0);
			DRM_ERROR("wait disable failed: id:%u intf:%d ret:%d\n",
				  DRMID(phys_enc->parent),
				  phys_enc->hw_intf->idx - INTF_0, ret);
		}
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}
static void dpu_encoder_phys_vid_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	unsigned long lock_flags;

	/*
	 * Video mode must flush CTL before enabling timing engine.
	 * Video encoders need to turn on their interfaces now.
	 */
	if (phys_enc->enable_state == DPU_ENC_ENABLING) {
		trace_dpu_enc_phys_vid_post_kickoff(DRMID(phys_enc->parent),
				    phys_enc->hw_intf->idx - INTF_0);
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		phys_enc->hw_intf->ops.enable_timing(phys_enc->hw_intf, 1);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
		phys_enc->enable_state = DPU_ENC_ENABLED;
	}
}
static void dpu_encoder_phys_vid_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret;

	trace_dpu_enc_phys_vid_irq_ctrl(DRMID(phys_enc->parent),
			    phys_enc->hw_intf->idx - INTF_0,
			    enable,
			    atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		ret = dpu_encoder_phys_vid_control_vblank_irq(phys_enc, true);
		if (WARN_ON(ret))
			return;

		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
	} else {
		dpu_encoder_phys_vid_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
	}
}
static int dpu_encoder_phys_vid_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	if (!dpu_encoder_phys_vid_is_master(phys_enc))
		return -EINVAL;

	if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.get_line_count)
		return -EINVAL;

	return phys_enc->hw_intf->ops.get_line_count(phys_enc->hw_intf);
}
static void dpu_encoder_phys_vid_init_ops(struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_vid_is_master;
	ops->mode_set = dpu_encoder_phys_vid_mode_set;
	ops->mode_fixup = dpu_encoder_phys_vid_mode_fixup;
	ops->enable = dpu_encoder_phys_vid_enable;
	ops->disable = dpu_encoder_phys_vid_disable;
	ops->destroy = dpu_encoder_phys_vid_destroy;
	ops->get_hw_resources = dpu_encoder_phys_vid_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_vid_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_vid_wait_for_commit_done;
	ops->wait_for_vblank = dpu_encoder_phys_vid_wait_for_vblank;
	ops->wait_for_tx_complete = dpu_encoder_phys_vid_wait_for_vblank;
	ops->irq_control = dpu_encoder_phys_vid_irq_control;
	ops->prepare_for_kickoff = dpu_encoder_phys_vid_prepare_for_kickoff;
	ops->handle_post_kickoff = dpu_encoder_phys_vid_handle_post_kickoff;
	ops->needs_single_flush = dpu_encoder_phys_vid_needs_single_flush;
	ops->get_line_count = dpu_encoder_phys_vid_get_line_count;
}
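/*
 * Allocate and initialize a video-mode physical encoder, wiring up its ops
 * table and IRQ callbacks. Returns an ERR_PTR() on failure.
 */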
struct dpu_encoder_phys *dpu_encoder_phys_vid_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_irq *irq;
	int i, ret = 0;

	if (!p) {
		ret = -EINVAL;
		goto fail;
	}

	phys_enc = kzalloc(sizeof(*phys_enc), GFP_KERNEL);
	if (!phys_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	DPU_DEBUG_VIDENC(phys_enc, "\n");

	dpu_encoder_phys_vid_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_VIDEO;
	phys_enc->enc_spinlock = p->enc_spinlock;
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->hw_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_VSYNC];
	irq->name = "vsync_irq";
	irq->intr_type = DPU_IRQ_TYPE_INTF_VSYNC;
	irq->intr_idx = INTR_IDX_VSYNC;
	irq->cb.func = dpu_encoder_phys_vid_vblank_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_vid_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	phys_enc->enable_state = DPU_ENC_DISABLED;

	DPU_DEBUG_VIDENC(phys_enc, "created intf idx:%d\n", p->intf_idx);

	return phys_enc;

fail:
	DPU_ERROR("failed to create encoder\n");
	if (phys_enc)
		dpu_encoder_phys_vid_destroy(phys_enc);

	return ERR_PTR(ret);
}