drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_cmd.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

#define PP_TIMEOUT_MAX_TRIALS	10
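
/*
 * Illustrative arithmetic, not part of the original source: each trial
 * below waits one KICKOFF_TIMEOUT_MS period (assumed here to be the
 * driver's usual 84 ms, i.e. roughly five frames at 60 fps), so ten
 * consecutive pp-done timeouts leave the panel unresponsive for close
 * to a second before DPU_ENCODER_FRAME_EVENT_PANEL_DEAD is reported.
 */
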
/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels to
 * override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000

static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc->split_role != ENC_ROLE_SLAVE) ? true : false;
}

static bool dpu_encoder_phys_cmd_mode_fixup(
		struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
	return true;
}

static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->intf_idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
}

static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
				phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
				phys_enc);
}

static void _dpu_encoder_phys_cmd_setup_irq_hw_idx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_irq *irq;

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->hw_idx = phys_enc->hw_ctl->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->hw_idx = phys_enc->hw_pp->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->hw_idx = phys_enc->hw_pp->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->hw_idx = phys_enc->intf_idx;
	irq->irq_idx = -EINVAL;
}

static void dpu_encoder_phys_cmd_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);

	if (!mode || !adj_mode) {
		DPU_ERROR("invalid args\n");
		return;
	}
	phys_enc->cached_mode = *adj_mode;
	DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
	drm_mode_debug_printmodeline(adj_mode);

	_dpu_encoder_phys_cmd_setup_irq_hw_idx(phys_enc);
}

static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			cmd_enc->pp_timeout_report_cnt,
			atomic_read(&phys_enc->pending_kickoff_cnt),
			frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
	}

	dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_RDPTR);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_register_irq(phys_enc,
					INTR_IDX_CTL_START);
	} else {
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_unregister_irq(phys_enc,
					INTR_IDX_CTL_START);

		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
	}
}

static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	u32 vsync_hz;
	struct dpu_kms *dpu_kms;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	mode = &phys_enc->cached_mode;

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (!phys_enc->hw_pp->ops.setup_tearcheck ||
	    !phys_enc->hw_pp->ops.enable_tearcheck) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: dsi byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if te disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr;
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * LCD panel frequency, divided by the number of rows (lines) in
	 * the LCD panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
				 vsync_hz);
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
			(mode->vtotal * drm_mode_vrefresh(mode));
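
	/*
	 * Illustrative numbers, not from the original source: with a
	 * hypothetical 19.2 MHz "vsync" clock and a 1920-line panel mode
	 * at 60 fps with vtotal = 2000, vsync_count comes out to
	 * 19200000 / (2000 * 60) = 160 vsync clock ticks per panel line.
	 */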

	/* enable external TE after kickoff to avoid premature autorefresh */
	tc_cfg.hw_vsync_mode = 0;

	/*
	 * By setting sync_cfg_height to near max register value, we essentially
	 * disable dpu hw generated TE signal, since hw TE will arrive first.
	 * Only caveat is if due to error, we hit wrap-around.
	 */
	tc_cfg.sync_cfg_height = 0xFFF0;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
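
	/*
	 * Worked example with the same hypothetical 1920-line panel as
	 * above (not part of the original source): vsync_init_val and
	 * start_pos are programmed to 1920 and rd_ptr_irq to 1921, i.e.
	 * just past the last active line, while the 4-line start/continue
	 * thresholds bound how far from the sync position a write-pointer
	 * start is still accepted.
	 */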

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
		mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d enable %u start_pos %u rd_ptr_irq %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
		tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
		tc_cfg.vsync_count, tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
		tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);

	phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
	phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			 phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * we do separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;
	u32 flush_mask = 0;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	ctl = phys_enc->hw_ctl;
	ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->intf_idx);
	ctl->ops.update_pending_flush(ctl, flush_mask);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
		return;

	trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
	phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp->ops.get_line_count)
		return -EINVAL;

	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      phys_enc->enable_state);

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->hw_pp->ops.enable_tearcheck)
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);

	kfree(cmd_enc);
}

static void dpu_encoder_phys_cmd_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res)
{
	hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
}

static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark kickoff request as outstanding. If there is more than one
	 * outstanding, then we have to wait for the previous one to complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			 phys_enc->hw_pp->idx - PINGPONG_0,
			 atomic_read(&phys_enc->pending_kickoff_cnt));
}

static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	} else if (!ret)
		ret = 0;

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->intf_idx - INTF_0);
	}

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (dpu_encoder_phys_cmd_is_master(phys_enc))
		rc = _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);

	/* required for both controllers */
	if (!rc && cmd_enc->serialize_wait4pp)
		dpu_encoder_phys_cmd_prepare_for_kickoff(phys_enc);

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;
	struct dpu_encoder_wait_info wait_info;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return rc;

	wait_info.wq = &cmd_enc->pending_vblank_wq;
	wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	atomic_inc(&cmd_enc->pending_vblank_cnt);

	rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
			&wait_info);

	return rc;
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->mode_set = dpu_encoder_phys_cmd_mode_set;
	ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->destroy = dpu_encoder_phys_cmd_destroy;
	ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;
	struct dpu_encoder_irq *irq;
	int i, ret = 0;

	DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);

	cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		ret = -ENOMEM;
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(ret);
	}
	phys_enc = &cmd_enc->base;
	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_CMD;
	phys_enc->enc_spinlock = p->enc_spinlock;
	cmd_enc->stream_sel = 0;
	phys_enc->enable_state = DPU_ENC_DISABLED;
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->hw_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->name = "ctl_start";
	irq->intr_type = DPU_IRQ_TYPE_CTL_START;
	irq->intr_idx = INTR_IDX_CTL_START;
	irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->name = "pp_done";
	irq->intr_type = DPU_IRQ_TYPE_PING_PONG_COMP;
	irq->intr_idx = INTR_IDX_PINGPONG;
	irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->name = "pp_rd_ptr";
	irq->intr_type = DPU_IRQ_TYPE_PING_PONG_RD_PTR;
	irq->intr_idx = INTR_IDX_RDPTR;
	irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}
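
/*
 * Minimal caller sketch, illustrative only and not part of this file.
 * The real caller is the virtual encoder in dpu_encoder.c; the field
 * names below follow struct dpu_enc_phys_init_params as declared in
 * dpu_encoder_phys.h, and dpu_encoder_parent_ops stands in for the
 * virtual encoder's callback table:
 *
 *	struct dpu_enc_phys_init_params params = {
 *		.dpu_kms      = dpu_kms,
 *		.parent       = drm_enc,
 *		.parent_ops   = &dpu_encoder_parent_ops,
 *		.split_role   = ENC_ROLE_SOLO,
 *		.intf_idx     = INTF_1,
 *		.enc_spinlock = &dpu_enc->enc_spinlock,
 *	};
 *	struct dpu_encoder_phys *phys = dpu_encoder_phys_cmd_init(&params);
 *
 *	if (IS_ERR(phys))
 *		return PTR_ERR(phys);
 */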