Merge tag 'io_uring-5.11-2021-01-16' of git://git.kernel.dk/linux-block
[linux/fpc-iii.git] / drivers / gpu / drm / rcar-du / rcar_du_crtc.c
blobb5fb941e0f534c590da278445620983599f3e523
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * rcar_du_crtc.c -- R-Car Display Unit CRTCs
5 * Copyright (C) 2013-2015 Renesas Electronics Corporation
7 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
8 */
10 #include <linux/clk.h>
11 #include <linux/mutex.h>
12 #include <linux/platform_device.h>
13 #include <linux/sys_soc.h>
15 #include <drm/drm_atomic.h>
16 #include <drm/drm_atomic_helper.h>
17 #include <drm/drm_bridge.h>
18 #include <drm/drm_crtc.h>
19 #include <drm/drm_device.h>
20 #include <drm/drm_fb_cma_helper.h>
21 #include <drm/drm_gem_cma_helper.h>
22 #include <drm/drm_plane_helper.h>
23 #include <drm/drm_vblank.h>
25 #include "rcar_cmm.h"
26 #include "rcar_du_crtc.h"
27 #include "rcar_du_drv.h"
28 #include "rcar_du_encoder.h"
29 #include "rcar_du_kms.h"
30 #include "rcar_du_plane.h"
31 #include "rcar_du_regs.h"
32 #include "rcar_du_vsp.h"
33 #include "rcar_lvds.h"
35 static u32 rcar_du_crtc_read(struct rcar_du_crtc *rcrtc, u32 reg)
37 struct rcar_du_device *rcdu = rcrtc->dev;
39 return rcar_du_read(rcdu, rcrtc->mmio_offset + reg);
42 static void rcar_du_crtc_write(struct rcar_du_crtc *rcrtc, u32 reg, u32 data)
44 struct rcar_du_device *rcdu = rcrtc->dev;
46 rcar_du_write(rcdu, rcrtc->mmio_offset + reg, data);
49 static void rcar_du_crtc_clr(struct rcar_du_crtc *rcrtc, u32 reg, u32 clr)
51 struct rcar_du_device *rcdu = rcrtc->dev;
53 rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
54 rcar_du_read(rcdu, rcrtc->mmio_offset + reg) & ~clr);
57 static void rcar_du_crtc_set(struct rcar_du_crtc *rcrtc, u32 reg, u32 set)
59 struct rcar_du_device *rcdu = rcrtc->dev;
61 rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
62 rcar_du_read(rcdu, rcrtc->mmio_offset + reg) | set);
65 void rcar_du_crtc_dsysr_clr_set(struct rcar_du_crtc *rcrtc, u32 clr, u32 set)
67 struct rcar_du_device *rcdu = rcrtc->dev;
69 rcrtc->dsysr = (rcrtc->dsysr & ~clr) | set;
70 rcar_du_write(rcdu, rcrtc->mmio_offset + DSYSR, rcrtc->dsysr);
73 /* -----------------------------------------------------------------------------
74 * Hardware Setup
/* Best DPLL divider configuration found by rcar_du_dpll_divider(). */
struct dpll_info {
	unsigned int output;	/* achieved output frequency */
	unsigned int fdpll;	/* FDPLL post-divider field value */
	unsigned int n;		/* N (feedback) divider field value */
	unsigned int m;		/* M (input) divider field value */
};
84 static void rcar_du_dpll_divider(struct rcar_du_crtc *rcrtc,
85 struct dpll_info *dpll,
86 unsigned long input,
87 unsigned long target)
89 unsigned long best_diff = (unsigned long)-1;
90 unsigned long diff;
91 unsigned int fdpll;
92 unsigned int m;
93 unsigned int n;
96 * fin fvco fout fclkout
97 * in --> [1/M] --> |PD| -> [LPF] -> [VCO] -> [1/P] -+-> [1/FDPLL] -> out
98 * +-> | | |
99 * | |
100 * +---------------- [1/N] <------------+
102 * fclkout = fvco / P / FDPLL -- (1)
104 * fin/M = fvco/P/N
106 * fvco = fin * P * N / M -- (2)
108 * (1) + (2) indicates
110 * fclkout = fin * N / M / FDPLL
112 * NOTES
113 * N : (n + 1)
114 * M : (m + 1)
115 * FDPLL : (fdpll + 1)
116 * P : 2
117 * 2kHz < fvco < 4096MHz
119 * To minimize the jitter,
120 * N : as large as possible
121 * M : as small as possible
123 for (m = 0; m < 4; m++) {
124 for (n = 119; n > 38; n--) {
126 * This code only runs on 64-bit architectures, the
127 * unsigned long type can thus be used for 64-bit
128 * computation. It will still compile without any
129 * warning on 32-bit architectures.
131 * To optimize calculations, use fout instead of fvco
132 * to verify the VCO frequency constraint.
134 unsigned long fout = input * (n + 1) / (m + 1);
136 if (fout < 1000 || fout > 2048 * 1000 * 1000U)
137 continue;
139 for (fdpll = 1; fdpll < 32; fdpll++) {
140 unsigned long output;
142 output = fout / (fdpll + 1);
143 if (output >= 400 * 1000 * 1000)
144 continue;
146 diff = abs((long)output - (long)target);
147 if (best_diff > diff) {
148 best_diff = diff;
149 dpll->n = n;
150 dpll->m = m;
151 dpll->fdpll = fdpll;
152 dpll->output = output;
155 if (diff == 0)
156 goto done;
161 done:
162 dev_dbg(rcrtc->dev->dev,
163 "output:%u, fdpll:%u, n:%u, m:%u, diff:%lu\n",
164 dpll->output, dpll->fdpll, dpll->n, dpll->m, best_diff);
167 struct du_clk_params {
168 struct clk *clk;
169 unsigned long rate;
170 unsigned long diff;
171 u32 escr;
174 static void rcar_du_escr_divider(struct clk *clk, unsigned long target,
175 u32 escr, struct du_clk_params *params)
177 unsigned long rate;
178 unsigned long diff;
179 u32 div;
182 * If the target rate has already been achieved perfectly we can't do
183 * better.
185 if (params->diff == 0)
186 return;
189 * Compute the input clock rate and internal divisor values to obtain
190 * the clock rate closest to the target frequency.
192 rate = clk_round_rate(clk, target);
193 div = clamp(DIV_ROUND_CLOSEST(rate, target), 1UL, 64UL) - 1;
194 diff = abs(rate / (div + 1) - target);
197 * Store the parameters if the resulting frequency is better than any
198 * previously calculated value.
200 if (diff < params->diff) {
201 params->clk = clk;
202 params->rate = rate;
203 params->diff = diff;
204 params->escr = escr | div;
208 static const struct soc_device_attribute rcar_du_r8a7795_es1[] = {
209 { .soc_id = "r8a7795", .revision = "ES1.*" },
210 { /* sentinel */ }
213 static void rcar_du_crtc_set_display_timing(struct rcar_du_crtc *rcrtc)
215 const struct drm_display_mode *mode = &rcrtc->crtc.state->adjusted_mode;
216 struct rcar_du_device *rcdu = rcrtc->dev;
217 unsigned long mode_clock = mode->clock * 1000;
218 u32 dsmr;
219 u32 escr;
221 if (rcdu->info->dpll_mask & (1 << rcrtc->index)) {
222 unsigned long target = mode_clock;
223 struct dpll_info dpll = { 0 };
224 unsigned long extclk;
225 u32 dpllcr;
226 u32 div = 0;
229 * DU channels that have a display PLL can't use the internal
230 * system clock, and have no internal clock divider.
234 * The H3 ES1.x exhibits dot clock duty cycle stability issues.
235 * We can work around them by configuring the DPLL to twice the
236 * desired frequency, coupled with a /2 post-divider. Restrict
237 * the workaround to H3 ES1.x as ES2.0 and all other SoCs have
238 * no post-divider when a display PLL is present (as shown by
239 * the workaround breaking HDMI output on M3-W during testing).
241 if (soc_device_match(rcar_du_r8a7795_es1)) {
242 target *= 2;
243 div = 1;
246 extclk = clk_get_rate(rcrtc->extclock);
247 rcar_du_dpll_divider(rcrtc, &dpll, extclk, target);
249 dpllcr = DPLLCR_CODE | DPLLCR_CLKE
250 | DPLLCR_FDPLL(dpll.fdpll)
251 | DPLLCR_N(dpll.n) | DPLLCR_M(dpll.m)
252 | DPLLCR_STBY;
254 if (rcrtc->index == 1)
255 dpllcr |= DPLLCR_PLCS1
256 | DPLLCR_INCS_DOTCLKIN1;
257 else
258 dpllcr |= DPLLCR_PLCS0
259 | DPLLCR_INCS_DOTCLKIN0;
261 rcar_du_group_write(rcrtc->group, DPLLCR, dpllcr);
263 escr = ESCR_DCLKSEL_DCLKIN | div;
264 } else if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index)) {
266 * Use the LVDS PLL output as the dot clock when outputting to
267 * the LVDS encoder on an SoC that supports this clock routing
268 * option. We use the clock directly in that case, without any
269 * additional divider.
271 escr = ESCR_DCLKSEL_DCLKIN;
272 } else {
273 struct du_clk_params params = { .diff = (unsigned long)-1 };
275 rcar_du_escr_divider(rcrtc->clock, mode_clock,
276 ESCR_DCLKSEL_CLKS, &params);
277 if (rcrtc->extclock)
278 rcar_du_escr_divider(rcrtc->extclock, mode_clock,
279 ESCR_DCLKSEL_DCLKIN, &params);
281 dev_dbg(rcrtc->dev->dev, "mode clock %lu %s rate %lu\n",
282 mode_clock, params.clk == rcrtc->clock ? "cpg" : "ext",
283 params.rate);
285 clk_set_rate(params.clk, params.rate);
286 escr = params.escr;
289 dev_dbg(rcrtc->dev->dev, "%s: ESCR 0x%08x\n", __func__, escr);
291 rcar_du_crtc_write(rcrtc, rcrtc->index % 2 ? ESCR13 : ESCR02, escr);
292 rcar_du_crtc_write(rcrtc, rcrtc->index % 2 ? OTAR13 : OTAR02, 0);
294 /* Signal polarities */
295 dsmr = ((mode->flags & DRM_MODE_FLAG_PVSYNC) ? DSMR_VSL : 0)
296 | ((mode->flags & DRM_MODE_FLAG_PHSYNC) ? DSMR_HSL : 0)
297 | ((mode->flags & DRM_MODE_FLAG_INTERLACE) ? DSMR_ODEV : 0)
298 | DSMR_DIPM_DISP | DSMR_CSPM;
299 rcar_du_crtc_write(rcrtc, DSMR, dsmr);
301 /* Display timings */
302 rcar_du_crtc_write(rcrtc, HDSR, mode->htotal - mode->hsync_start - 19);
303 rcar_du_crtc_write(rcrtc, HDER, mode->htotal - mode->hsync_start +
304 mode->hdisplay - 19);
305 rcar_du_crtc_write(rcrtc, HSWR, mode->hsync_end -
306 mode->hsync_start - 1);
307 rcar_du_crtc_write(rcrtc, HCR, mode->htotal - 1);
309 rcar_du_crtc_write(rcrtc, VDSR, mode->crtc_vtotal -
310 mode->crtc_vsync_end - 2);
311 rcar_du_crtc_write(rcrtc, VDER, mode->crtc_vtotal -
312 mode->crtc_vsync_end +
313 mode->crtc_vdisplay - 2);
314 rcar_du_crtc_write(rcrtc, VSPR, mode->crtc_vtotal -
315 mode->crtc_vsync_end +
316 mode->crtc_vsync_start - 1);
317 rcar_du_crtc_write(rcrtc, VCR, mode->crtc_vtotal - 1);
319 rcar_du_crtc_write(rcrtc, DESR, mode->htotal - mode->hsync_start - 1);
320 rcar_du_crtc_write(rcrtc, DEWR, mode->hdisplay);
323 static unsigned int plane_zpos(struct rcar_du_plane *plane)
325 return plane->plane.state->normalized_zpos;
328 static const struct rcar_du_format_info *
329 plane_format(struct rcar_du_plane *plane)
331 return to_rcar_plane_state(plane->plane.state)->format;
334 static void rcar_du_crtc_update_planes(struct rcar_du_crtc *rcrtc)
336 struct rcar_du_plane *planes[RCAR_DU_NUM_HW_PLANES];
337 struct rcar_du_device *rcdu = rcrtc->dev;
338 unsigned int num_planes = 0;
339 unsigned int dptsr_planes;
340 unsigned int hwplanes = 0;
341 unsigned int prio = 0;
342 unsigned int i;
343 u32 dspr = 0;
345 for (i = 0; i < rcrtc->group->num_planes; ++i) {
346 struct rcar_du_plane *plane = &rcrtc->group->planes[i];
347 unsigned int j;
349 if (plane->plane.state->crtc != &rcrtc->crtc ||
350 !plane->plane.state->visible)
351 continue;
353 /* Insert the plane in the sorted planes array. */
354 for (j = num_planes++; j > 0; --j) {
355 if (plane_zpos(planes[j-1]) <= plane_zpos(plane))
356 break;
357 planes[j] = planes[j-1];
360 planes[j] = plane;
361 prio += plane_format(plane)->planes * 4;
364 for (i = 0; i < num_planes; ++i) {
365 struct rcar_du_plane *plane = planes[i];
366 struct drm_plane_state *state = plane->plane.state;
367 unsigned int index = to_rcar_plane_state(state)->hwindex;
369 prio -= 4;
370 dspr |= (index + 1) << prio;
371 hwplanes |= 1 << index;
373 if (plane_format(plane)->planes == 2) {
374 index = (index + 1) % 8;
376 prio -= 4;
377 dspr |= (index + 1) << prio;
378 hwplanes |= 1 << index;
382 /* If VSP+DU integration is enabled the plane assignment is fixed. */
383 if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE)) {
384 if (rcdu->info->gen < 3) {
385 dspr = (rcrtc->index % 2) + 1;
386 hwplanes = 1 << (rcrtc->index % 2);
387 } else {
388 dspr = (rcrtc->index % 2) ? 3 : 1;
389 hwplanes = 1 << ((rcrtc->index % 2) ? 2 : 0);
394 * Update the planes to display timing and dot clock generator
395 * associations.
397 * Updating the DPTSR register requires restarting the CRTC group,
398 * resulting in visible flicker. To mitigate the issue only update the
399 * association if needed by enabled planes. Planes being disabled will
400 * keep their current association.
402 mutex_lock(&rcrtc->group->lock);
404 dptsr_planes = rcrtc->index % 2 ? rcrtc->group->dptsr_planes | hwplanes
405 : rcrtc->group->dptsr_planes & ~hwplanes;
407 if (dptsr_planes != rcrtc->group->dptsr_planes) {
408 rcar_du_group_write(rcrtc->group, DPTSR,
409 (dptsr_planes << 16) | dptsr_planes);
410 rcrtc->group->dptsr_planes = dptsr_planes;
412 if (rcrtc->group->used_crtcs)
413 rcar_du_group_restart(rcrtc->group);
416 /* Restart the group if plane sources have changed. */
417 if (rcrtc->group->need_restart)
418 rcar_du_group_restart(rcrtc->group);
420 mutex_unlock(&rcrtc->group->lock);
422 rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR,
423 dspr);
426 /* -----------------------------------------------------------------------------
427 * Page Flip
430 void rcar_du_crtc_finish_page_flip(struct rcar_du_crtc *rcrtc)
432 struct drm_pending_vblank_event *event;
433 struct drm_device *dev = rcrtc->crtc.dev;
434 unsigned long flags;
436 spin_lock_irqsave(&dev->event_lock, flags);
437 event = rcrtc->event;
438 rcrtc->event = NULL;
439 spin_unlock_irqrestore(&dev->event_lock, flags);
441 if (event == NULL)
442 return;
444 spin_lock_irqsave(&dev->event_lock, flags);
445 drm_crtc_send_vblank_event(&rcrtc->crtc, event);
446 wake_up(&rcrtc->flip_wait);
447 spin_unlock_irqrestore(&dev->event_lock, flags);
449 drm_crtc_vblank_put(&rcrtc->crtc);
452 static bool rcar_du_crtc_page_flip_pending(struct rcar_du_crtc *rcrtc)
454 struct drm_device *dev = rcrtc->crtc.dev;
455 unsigned long flags;
456 bool pending;
458 spin_lock_irqsave(&dev->event_lock, flags);
459 pending = rcrtc->event != NULL;
460 spin_unlock_irqrestore(&dev->event_lock, flags);
462 return pending;
465 static void rcar_du_crtc_wait_page_flip(struct rcar_du_crtc *rcrtc)
467 struct rcar_du_device *rcdu = rcrtc->dev;
469 if (wait_event_timeout(rcrtc->flip_wait,
470 !rcar_du_crtc_page_flip_pending(rcrtc),
471 msecs_to_jiffies(50)))
472 return;
474 dev_warn(rcdu->dev, "page flip timeout\n");
476 rcar_du_crtc_finish_page_flip(rcrtc);
479 /* -----------------------------------------------------------------------------
480 * Color Management Module (CMM)
483 static int rcar_du_cmm_check(struct drm_crtc *crtc,
484 struct drm_crtc_state *state)
486 struct drm_property_blob *drm_lut = state->gamma_lut;
487 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
488 struct device *dev = rcrtc->dev->dev;
490 if (!drm_lut)
491 return 0;
493 /* We only accept fully populated LUT tables. */
494 if (drm_color_lut_size(drm_lut) != CM2_LUT_SIZE) {
495 dev_err(dev, "invalid gamma lut size: %zu bytes\n",
496 drm_lut->length);
497 return -EINVAL;
500 return 0;
503 static void rcar_du_cmm_setup(struct drm_crtc *crtc)
505 struct drm_property_blob *drm_lut = crtc->state->gamma_lut;
506 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
507 struct rcar_cmm_config cmm_config = {};
509 if (!rcrtc->cmm)
510 return;
512 if (drm_lut)
513 cmm_config.lut.table = (struct drm_color_lut *)drm_lut->data;
515 rcar_cmm_setup(rcrtc->cmm, &cmm_config);
518 /* -----------------------------------------------------------------------------
519 * Start/Stop and Suspend/Resume
522 static void rcar_du_crtc_setup(struct rcar_du_crtc *rcrtc)
524 /* Set display off and background to black */
525 rcar_du_crtc_write(rcrtc, DOOR, DOOR_RGB(0, 0, 0));
526 rcar_du_crtc_write(rcrtc, BPOR, BPOR_RGB(0, 0, 0));
528 /* Configure display timings and output routing */
529 rcar_du_crtc_set_display_timing(rcrtc);
530 rcar_du_group_set_routing(rcrtc->group);
532 /* Start with all planes disabled. */
533 rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR, 0);
535 /* Enable the VSP compositor. */
536 if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
537 rcar_du_vsp_enable(rcrtc);
539 /* Turn vertical blanking interrupt reporting on. */
540 drm_crtc_vblank_on(&rcrtc->crtc);
543 static int rcar_du_crtc_get(struct rcar_du_crtc *rcrtc)
545 int ret;
548 * Guard against double-get, as the function is called from both the
549 * .atomic_enable() and .atomic_begin() handlers.
551 if (rcrtc->initialized)
552 return 0;
554 ret = clk_prepare_enable(rcrtc->clock);
555 if (ret < 0)
556 return ret;
558 ret = clk_prepare_enable(rcrtc->extclock);
559 if (ret < 0)
560 goto error_clock;
562 ret = rcar_du_group_get(rcrtc->group);
563 if (ret < 0)
564 goto error_group;
566 rcar_du_crtc_setup(rcrtc);
567 rcrtc->initialized = true;
569 return 0;
571 error_group:
572 clk_disable_unprepare(rcrtc->extclock);
573 error_clock:
574 clk_disable_unprepare(rcrtc->clock);
575 return ret;
578 static void rcar_du_crtc_put(struct rcar_du_crtc *rcrtc)
580 rcar_du_group_put(rcrtc->group);
582 clk_disable_unprepare(rcrtc->extclock);
583 clk_disable_unprepare(rcrtc->clock);
585 rcrtc->initialized = false;
588 static void rcar_du_crtc_start(struct rcar_du_crtc *rcrtc)
590 bool interlaced;
593 * Select master sync mode. This enables display operation in master
594 * sync mode (with the HSYNC and VSYNC signals configured as outputs and
595 * actively driven).
597 interlaced = rcrtc->crtc.mode.flags & DRM_MODE_FLAG_INTERLACE;
598 rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK | DSYSR_SCM_MASK,
599 (interlaced ? DSYSR_SCM_INT_VIDEO : 0) |
600 DSYSR_TVM_MASTER);
602 rcar_du_group_start_stop(rcrtc->group, true);
605 static void rcar_du_crtc_disable_planes(struct rcar_du_crtc *rcrtc)
607 struct rcar_du_device *rcdu = rcrtc->dev;
608 struct drm_crtc *crtc = &rcrtc->crtc;
609 u32 status;
611 /* Make sure vblank interrupts are enabled. */
612 drm_crtc_vblank_get(crtc);
615 * Disable planes and calculate how many vertical blanking interrupts we
616 * have to wait for. If a vertical blanking interrupt has been triggered
617 * but not processed yet, we don't know whether it occurred before or
618 * after the planes got disabled. We thus have to wait for two vblank
619 * interrupts in that case.
621 spin_lock_irq(&rcrtc->vblank_lock);
622 rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR, 0);
623 status = rcar_du_crtc_read(rcrtc, DSSR);
624 rcrtc->vblank_count = status & DSSR_VBK ? 2 : 1;
625 spin_unlock_irq(&rcrtc->vblank_lock);
627 if (!wait_event_timeout(rcrtc->vblank_wait, rcrtc->vblank_count == 0,
628 msecs_to_jiffies(100)))
629 dev_warn(rcdu->dev, "vertical blanking timeout\n");
631 drm_crtc_vblank_put(crtc);
634 static void rcar_du_crtc_stop(struct rcar_du_crtc *rcrtc)
636 struct drm_crtc *crtc = &rcrtc->crtc;
639 * Disable all planes and wait for the change to take effect. This is
640 * required as the plane enable registers are updated on vblank, and no
641 * vblank will occur once the CRTC is stopped. Disabling planes when
642 * starting the CRTC thus wouldn't be enough as it would start scanning
643 * out immediately from old frame buffers until the next vblank.
645 * This increases the CRTC stop delay, especially when multiple CRTCs
646 * are stopped in one operation as we now wait for one vblank per CRTC.
647 * Whether this can be improved needs to be researched.
649 rcar_du_crtc_disable_planes(rcrtc);
652 * Disable vertical blanking interrupt reporting. We first need to wait
653 * for page flip completion before stopping the CRTC as userspace
654 * expects page flips to eventually complete.
656 rcar_du_crtc_wait_page_flip(rcrtc);
657 drm_crtc_vblank_off(crtc);
659 /* Disable the VSP compositor. */
660 if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
661 rcar_du_vsp_disable(rcrtc);
663 if (rcrtc->cmm)
664 rcar_cmm_disable(rcrtc->cmm);
667 * Select switch sync mode. This stops display operation and configures
668 * the HSYNC and VSYNC signals as inputs.
670 * TODO: Find another way to stop the display for DUs that don't support
671 * TVM sync.
673 if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_TVM_SYNC))
674 rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK,
675 DSYSR_TVM_SWITCH);
677 rcar_du_group_start_stop(rcrtc->group, false);
680 /* -----------------------------------------------------------------------------
681 * CRTC Functions
684 static int rcar_du_crtc_atomic_check(struct drm_crtc *crtc,
685 struct drm_atomic_state *state)
687 struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
688 crtc);
689 struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(crtc_state);
690 struct drm_encoder *encoder;
691 int ret;
693 ret = rcar_du_cmm_check(crtc, crtc_state);
694 if (ret)
695 return ret;
697 /* Store the routes from the CRTC output to the DU outputs. */
698 rstate->outputs = 0;
700 drm_for_each_encoder_mask(encoder, crtc->dev,
701 crtc_state->encoder_mask) {
702 struct rcar_du_encoder *renc;
704 /* Skip the writeback encoder. */
705 if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
706 continue;
708 renc = to_rcar_encoder(encoder);
709 rstate->outputs |= BIT(renc->output);
712 return 0;
715 static void rcar_du_crtc_atomic_enable(struct drm_crtc *crtc,
716 struct drm_atomic_state *state)
718 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
719 struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(crtc->state);
720 struct rcar_du_device *rcdu = rcrtc->dev;
722 if (rcrtc->cmm)
723 rcar_cmm_enable(rcrtc->cmm);
724 rcar_du_crtc_get(rcrtc);
727 * On D3/E3 the dot clock is provided by the LVDS encoder attached to
728 * the DU channel. We need to enable its clock output explicitly if
729 * the LVDS output is disabled.
731 if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) &&
732 rstate->outputs == BIT(RCAR_DU_OUTPUT_DPAD0)) {
733 struct rcar_du_encoder *encoder =
734 rcdu->encoders[RCAR_DU_OUTPUT_LVDS0 + rcrtc->index];
735 const struct drm_display_mode *mode =
736 &crtc->state->adjusted_mode;
737 struct drm_bridge *bridge;
739 bridge = drm_bridge_chain_get_first_bridge(&encoder->base);
740 rcar_lvds_clk_enable(bridge, mode->clock * 1000);
743 rcar_du_crtc_start(rcrtc);
746 * TODO: The chip manual indicates that CMM tables should be written
747 * after the DU channel has been activated. Investigate the impact
748 * of this restriction on the first displayed frame.
750 rcar_du_cmm_setup(crtc);
753 static void rcar_du_crtc_atomic_disable(struct drm_crtc *crtc,
754 struct drm_atomic_state *state)
756 struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state,
757 crtc);
758 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
759 struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(old_state);
760 struct rcar_du_device *rcdu = rcrtc->dev;
762 rcar_du_crtc_stop(rcrtc);
763 rcar_du_crtc_put(rcrtc);
765 if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) &&
766 rstate->outputs == BIT(RCAR_DU_OUTPUT_DPAD0)) {
767 struct rcar_du_encoder *encoder =
768 rcdu->encoders[RCAR_DU_OUTPUT_LVDS0 + rcrtc->index];
769 struct drm_bridge *bridge;
772 * Disable the LVDS clock output, see
773 * rcar_du_crtc_atomic_enable().
775 bridge = drm_bridge_chain_get_first_bridge(&encoder->base);
776 rcar_lvds_clk_disable(bridge);
779 spin_lock_irq(&crtc->dev->event_lock);
780 if (crtc->state->event) {
781 drm_crtc_send_vblank_event(crtc, crtc->state->event);
782 crtc->state->event = NULL;
784 spin_unlock_irq(&crtc->dev->event_lock);
787 static void rcar_du_crtc_atomic_begin(struct drm_crtc *crtc,
788 struct drm_atomic_state *state)
790 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
792 WARN_ON(!crtc->state->enable);
795 * If a mode set is in progress we can be called with the CRTC disabled.
796 * We thus need to first get and setup the CRTC in order to configure
797 * planes. We must *not* put the CRTC in .atomic_flush(), as it must be
798 * kept awake until the .atomic_enable() call that will follow. The get
799 * operation in .atomic_enable() will in that case be a no-op, and the
800 * CRTC will be put later in .atomic_disable().
802 * If a mode set is not in progress the CRTC is enabled, and the
803 * following get call will be a no-op. There is thus no need to balance
804 * it in .atomic_flush() either.
806 rcar_du_crtc_get(rcrtc);
808 /* If the active state changed, we let .atomic_enable handle CMM. */
809 if (crtc->state->color_mgmt_changed && !crtc->state->active_changed)
810 rcar_du_cmm_setup(crtc);
812 if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
813 rcar_du_vsp_atomic_begin(rcrtc);
816 static void rcar_du_crtc_atomic_flush(struct drm_crtc *crtc,
817 struct drm_atomic_state *state)
819 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
820 struct drm_device *dev = rcrtc->crtc.dev;
821 unsigned long flags;
823 rcar_du_crtc_update_planes(rcrtc);
825 if (crtc->state->event) {
826 WARN_ON(drm_crtc_vblank_get(crtc) != 0);
828 spin_lock_irqsave(&dev->event_lock, flags);
829 rcrtc->event = crtc->state->event;
830 crtc->state->event = NULL;
831 spin_unlock_irqrestore(&dev->event_lock, flags);
834 if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
835 rcar_du_vsp_atomic_flush(rcrtc);
838 static enum drm_mode_status
839 rcar_du_crtc_mode_valid(struct drm_crtc *crtc,
840 const struct drm_display_mode *mode)
842 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
843 struct rcar_du_device *rcdu = rcrtc->dev;
844 bool interlaced = mode->flags & DRM_MODE_FLAG_INTERLACE;
845 unsigned int vbp;
847 if (interlaced && !rcar_du_has(rcdu, RCAR_DU_FEATURE_INTERLACED))
848 return MODE_NO_INTERLACE;
851 * The hardware requires a minimum combined horizontal sync and back
852 * porch of 20 pixels and a minimum vertical back porch of 3 lines.
854 if (mode->htotal - mode->hsync_start < 20)
855 return MODE_HBLANK_NARROW;
857 vbp = (mode->vtotal - mode->vsync_end) / (interlaced ? 2 : 1);
858 if (vbp < 3)
859 return MODE_VBLANK_NARROW;
861 return MODE_OK;
864 static const struct drm_crtc_helper_funcs crtc_helper_funcs = {
865 .atomic_check = rcar_du_crtc_atomic_check,
866 .atomic_begin = rcar_du_crtc_atomic_begin,
867 .atomic_flush = rcar_du_crtc_atomic_flush,
868 .atomic_enable = rcar_du_crtc_atomic_enable,
869 .atomic_disable = rcar_du_crtc_atomic_disable,
870 .mode_valid = rcar_du_crtc_mode_valid,
873 static void rcar_du_crtc_crc_init(struct rcar_du_crtc *rcrtc)
875 struct rcar_du_device *rcdu = rcrtc->dev;
876 const char **sources;
877 unsigned int count;
878 int i = -1;
880 /* CRC available only on Gen3 HW. */
881 if (rcdu->info->gen < 3)
882 return;
884 /* Reserve 1 for "auto" source. */
885 count = rcrtc->vsp->num_planes + 1;
887 sources = kmalloc_array(count, sizeof(*sources), GFP_KERNEL);
888 if (!sources)
889 return;
891 sources[0] = kstrdup("auto", GFP_KERNEL);
892 if (!sources[0])
893 goto error;
895 for (i = 0; i < rcrtc->vsp->num_planes; ++i) {
896 struct drm_plane *plane = &rcrtc->vsp->planes[i].plane;
897 char name[16];
899 sprintf(name, "plane%u", plane->base.id);
900 sources[i + 1] = kstrdup(name, GFP_KERNEL);
901 if (!sources[i + 1])
902 goto error;
905 rcrtc->sources = sources;
906 rcrtc->sources_count = count;
907 return;
909 error:
910 while (i >= 0) {
911 kfree(sources[i]);
912 i--;
914 kfree(sources);
917 static void rcar_du_crtc_crc_cleanup(struct rcar_du_crtc *rcrtc)
919 unsigned int i;
921 if (!rcrtc->sources)
922 return;
924 for (i = 0; i < rcrtc->sources_count; i++)
925 kfree(rcrtc->sources[i]);
926 kfree(rcrtc->sources);
928 rcrtc->sources = NULL;
929 rcrtc->sources_count = 0;
932 static struct drm_crtc_state *
933 rcar_du_crtc_atomic_duplicate_state(struct drm_crtc *crtc)
935 struct rcar_du_crtc_state *state;
936 struct rcar_du_crtc_state *copy;
938 if (WARN_ON(!crtc->state))
939 return NULL;
941 state = to_rcar_crtc_state(crtc->state);
942 copy = kmemdup(state, sizeof(*state), GFP_KERNEL);
943 if (copy == NULL)
944 return NULL;
946 __drm_atomic_helper_crtc_duplicate_state(crtc, &copy->state);
948 return &copy->state;
/* Release a driver-private CRTC state duplicated above. */
static void rcar_du_crtc_atomic_destroy_state(struct drm_crtc *crtc,
					      struct drm_crtc_state *state)
{
	__drm_atomic_helper_crtc_destroy_state(state);
	kfree(to_rcar_crtc_state(state));
}
/* .destroy() handler (Gen3): free CRC sources, then the CRTC itself. */
static void rcar_du_crtc_cleanup(struct drm_crtc *crtc)
{
	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);

	rcar_du_crtc_crc_cleanup(rcrtc);

	/*
	 * drm_crtc_cleanup() returns void; returning its "value" from a void
	 * function is an ISO C constraint violation, so call it plainly.
	 */
	drm_crtc_cleanup(crtc);
}
967 static void rcar_du_crtc_reset(struct drm_crtc *crtc)
969 struct rcar_du_crtc_state *state;
971 if (crtc->state) {
972 rcar_du_crtc_atomic_destroy_state(crtc, crtc->state);
973 crtc->state = NULL;
976 state = kzalloc(sizeof(*state), GFP_KERNEL);
977 if (state == NULL)
978 return;
980 state->crc.source = VSP1_DU_CRC_NONE;
981 state->crc.index = 0;
983 __drm_atomic_helper_crtc_reset(crtc, &state->state);
986 static int rcar_du_crtc_enable_vblank(struct drm_crtc *crtc)
988 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
990 rcar_du_crtc_write(rcrtc, DSRCR, DSRCR_VBCL);
991 rcar_du_crtc_set(rcrtc, DIER, DIER_VBE);
992 rcrtc->vblank_enable = true;
994 return 0;
997 static void rcar_du_crtc_disable_vblank(struct drm_crtc *crtc)
999 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1001 rcar_du_crtc_clr(rcrtc, DIER, DIER_VBE);
1002 rcrtc->vblank_enable = false;
1005 static int rcar_du_crtc_parse_crc_source(struct rcar_du_crtc *rcrtc,
1006 const char *source_name,
1007 enum vsp1_du_crc_source *source)
1009 unsigned int index;
1010 int ret;
1013 * Parse the source name. Supported values are "plane%u" to compute the
1014 * CRC on an input plane (%u is the plane ID), and "auto" to compute the
1015 * CRC on the composer (VSP) output.
1018 if (!source_name) {
1019 *source = VSP1_DU_CRC_NONE;
1020 return 0;
1021 } else if (!strcmp(source_name, "auto")) {
1022 *source = VSP1_DU_CRC_OUTPUT;
1023 return 0;
1024 } else if (strstarts(source_name, "plane")) {
1025 unsigned int i;
1027 *source = VSP1_DU_CRC_PLANE;
1029 ret = kstrtouint(source_name + strlen("plane"), 10, &index);
1030 if (ret < 0)
1031 return ret;
1033 for (i = 0; i < rcrtc->vsp->num_planes; ++i) {
1034 if (index == rcrtc->vsp->planes[i].plane.base.id)
1035 return i;
1039 return -EINVAL;
1042 static int rcar_du_crtc_verify_crc_source(struct drm_crtc *crtc,
1043 const char *source_name,
1044 size_t *values_cnt)
1046 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1047 enum vsp1_du_crc_source source;
1049 if (rcar_du_crtc_parse_crc_source(rcrtc, source_name, &source) < 0) {
1050 DRM_DEBUG_DRIVER("unknown source %s\n", source_name);
1051 return -EINVAL;
1054 *values_cnt = 1;
1055 return 0;
1058 static const char *const *
1059 rcar_du_crtc_get_crc_sources(struct drm_crtc *crtc, size_t *count)
1061 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1063 *count = rcrtc->sources_count;
1064 return rcrtc->sources;
1067 static int rcar_du_crtc_set_crc_source(struct drm_crtc *crtc,
1068 const char *source_name)
1070 struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1071 struct drm_modeset_acquire_ctx ctx;
1072 struct drm_crtc_state *crtc_state;
1073 struct drm_atomic_state *state;
1074 enum vsp1_du_crc_source source;
1075 unsigned int index;
1076 int ret;
1078 ret = rcar_du_crtc_parse_crc_source(rcrtc, source_name, &source);
1079 if (ret < 0)
1080 return ret;
1082 index = ret;
1084 /* Perform an atomic commit to set the CRC source. */
1085 drm_modeset_acquire_init(&ctx, 0);
1087 state = drm_atomic_state_alloc(crtc->dev);
1088 if (!state) {
1089 ret = -ENOMEM;
1090 goto unlock;
1093 state->acquire_ctx = &ctx;
1095 retry:
1096 crtc_state = drm_atomic_get_crtc_state(state, crtc);
1097 if (!IS_ERR(crtc_state)) {
1098 struct rcar_du_crtc_state *rcrtc_state;
1100 rcrtc_state = to_rcar_crtc_state(crtc_state);
1101 rcrtc_state->crc.source = source;
1102 rcrtc_state->crc.index = index;
1104 ret = drm_atomic_commit(state);
1105 } else {
1106 ret = PTR_ERR(crtc_state);
1109 if (ret == -EDEADLK) {
1110 drm_atomic_state_clear(state);
1111 drm_modeset_backoff(&ctx);
1112 goto retry;
1115 drm_atomic_state_put(state);
1117 unlock:
1118 drm_modeset_drop_locks(&ctx);
1119 drm_modeset_acquire_fini(&ctx);
1121 return ret;
1124 static const struct drm_crtc_funcs crtc_funcs_gen2 = {
1125 .reset = rcar_du_crtc_reset,
1126 .destroy = drm_crtc_cleanup,
1127 .set_config = drm_atomic_helper_set_config,
1128 .page_flip = drm_atomic_helper_page_flip,
1129 .atomic_duplicate_state = rcar_du_crtc_atomic_duplicate_state,
1130 .atomic_destroy_state = rcar_du_crtc_atomic_destroy_state,
1131 .enable_vblank = rcar_du_crtc_enable_vblank,
1132 .disable_vblank = rcar_du_crtc_disable_vblank,
1135 static const struct drm_crtc_funcs crtc_funcs_gen3 = {
1136 .reset = rcar_du_crtc_reset,
1137 .destroy = rcar_du_crtc_cleanup,
1138 .set_config = drm_atomic_helper_set_config,
1139 .page_flip = drm_atomic_helper_page_flip,
1140 .atomic_duplicate_state = rcar_du_crtc_atomic_duplicate_state,
1141 .atomic_destroy_state = rcar_du_crtc_atomic_destroy_state,
1142 .enable_vblank = rcar_du_crtc_enable_vblank,
1143 .disable_vblank = rcar_du_crtc_disable_vblank,
1144 .set_crc_source = rcar_du_crtc_set_crc_source,
1145 .verify_crc_source = rcar_du_crtc_verify_crc_source,
1146 .get_crc_sources = rcar_du_crtc_get_crc_sources,
1147 .gamma_set = drm_atomic_helper_legacy_gamma_set,
1150 /* -----------------------------------------------------------------------------
1151 * Interrupt Handling
1154 static irqreturn_t rcar_du_crtc_irq(int irq, void *arg)
1156 struct rcar_du_crtc *rcrtc = arg;
1157 struct rcar_du_device *rcdu = rcrtc->dev;
1158 irqreturn_t ret = IRQ_NONE;
1159 u32 status;
1161 spin_lock(&rcrtc->vblank_lock);
1163 status = rcar_du_crtc_read(rcrtc, DSSR);
1164 rcar_du_crtc_write(rcrtc, DSRCR, status & DSRCR_MASK);
1166 if (status & DSSR_VBK) {
1168 * Wake up the vblank wait if the counter reaches 0. This must
1169 * be protected by the vblank_lock to avoid races in
1170 * rcar_du_crtc_disable_planes().
1172 if (rcrtc->vblank_count) {
1173 if (--rcrtc->vblank_count == 0)
1174 wake_up(&rcrtc->vblank_wait);
1178 spin_unlock(&rcrtc->vblank_lock);
1180 if (status & DSSR_VBK) {
1181 if (rcdu->info->gen < 3) {
1182 drm_crtc_handle_vblank(&rcrtc->crtc);
1183 rcar_du_crtc_finish_page_flip(rcrtc);
1186 ret = IRQ_HANDLED;
1189 return ret;
/* -----------------------------------------------------------------------------
 * Initialization
 */
1196 int rcar_du_crtc_create(struct rcar_du_group *rgrp, unsigned int swindex,
1197 unsigned int hwindex)
1199 static const unsigned int mmio_offsets[] = {
1200 DU0_REG_OFFSET, DU1_REG_OFFSET, DU2_REG_OFFSET, DU3_REG_OFFSET
1203 struct rcar_du_device *rcdu = rgrp->dev;
1204 struct platform_device *pdev = to_platform_device(rcdu->dev);
1205 struct rcar_du_crtc *rcrtc = &rcdu->crtcs[swindex];
1206 struct drm_crtc *crtc = &rcrtc->crtc;
1207 struct drm_plane *primary;
1208 unsigned int irqflags;
1209 struct clk *clk;
1210 char clk_name[9];
1211 char *name;
1212 int irq;
1213 int ret;
1215 /* Get the CRTC clock and the optional external clock. */
1216 if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ_CLOCK)) {
1217 sprintf(clk_name, "du.%u", hwindex);
1218 name = clk_name;
1219 } else {
1220 name = NULL;
1223 rcrtc->clock = devm_clk_get(rcdu->dev, name);
1224 if (IS_ERR(rcrtc->clock)) {
1225 dev_err(rcdu->dev, "no clock for DU channel %u\n", hwindex);
1226 return PTR_ERR(rcrtc->clock);
1229 sprintf(clk_name, "dclkin.%u", hwindex);
1230 clk = devm_clk_get(rcdu->dev, clk_name);
1231 if (!IS_ERR(clk)) {
1232 rcrtc->extclock = clk;
1233 } else if (PTR_ERR(clk) == -EPROBE_DEFER) {
1234 return -EPROBE_DEFER;
1235 } else if (rcdu->info->dpll_mask & BIT(hwindex)) {
1237 * DU channels that have a display PLL can't use the internal
1238 * system clock and thus require an external clock.
1240 ret = PTR_ERR(clk);
1241 dev_err(rcdu->dev, "can't get dclkin.%u: %d\n", hwindex, ret);
1242 return ret;
1245 init_waitqueue_head(&rcrtc->flip_wait);
1246 init_waitqueue_head(&rcrtc->vblank_wait);
1247 spin_lock_init(&rcrtc->vblank_lock);
1249 rcrtc->dev = rcdu;
1250 rcrtc->group = rgrp;
1251 rcrtc->mmio_offset = mmio_offsets[hwindex];
1252 rcrtc->index = hwindex;
1253 rcrtc->dsysr = (rcrtc->index % 2 ? 0 : DSYSR_DRES) | DSYSR_TVM_TVSYNC;
1255 if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE))
1256 primary = &rcrtc->vsp->planes[rcrtc->vsp_pipe].plane;
1257 else
1258 primary = &rgrp->planes[swindex % 2].plane;
1260 ret = drm_crtc_init_with_planes(rcdu->ddev, crtc, primary, NULL,
1261 rcdu->info->gen <= 2 ?
1262 &crtc_funcs_gen2 : &crtc_funcs_gen3,
1263 NULL);
1264 if (ret < 0)
1265 return ret;
1267 /* CMM might be disabled for this CRTC. */
1268 if (rcdu->cmms[swindex]) {
1269 rcrtc->cmm = rcdu->cmms[swindex];
1270 rgrp->cmms_mask |= BIT(hwindex % 2);
1272 drm_mode_crtc_set_gamma_size(crtc, CM2_LUT_SIZE);
1273 drm_crtc_enable_color_mgmt(crtc, 0, false, CM2_LUT_SIZE);
1276 drm_crtc_helper_add(crtc, &crtc_helper_funcs);
1278 /* Register the interrupt handler. */
1279 if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ_CLOCK)) {
1280 /* The IRQ's are associated with the CRTC (sw)index. */
1281 irq = platform_get_irq(pdev, swindex);
1282 irqflags = 0;
1283 } else {
1284 irq = platform_get_irq(pdev, 0);
1285 irqflags = IRQF_SHARED;
1288 if (irq < 0) {
1289 dev_err(rcdu->dev, "no IRQ for CRTC %u\n", swindex);
1290 return irq;
1293 ret = devm_request_irq(rcdu->dev, irq, rcar_du_crtc_irq, irqflags,
1294 dev_name(rcdu->dev), rcrtc);
1295 if (ret < 0) {
1296 dev_err(rcdu->dev,
1297 "failed to register IRQ for CRTC %u\n", swindex);
1298 return ret;
1301 rcar_du_crtc_crc_init(rcrtc);
1303 return 0;