// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2014 Free Electrons
 * Copyright (C) 2014 Atmel
 *
 * Author: Boris BREZILLON <boris.brezillon@free-electrons.com>
 */

#include <linux/dmapool.h>
#include <linux/mfd/atmel-hlcdc.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_plane_helper.h>

#include "atmel_hlcdc_dc.h"
/**
 * struct atmel_hlcdc_plane_state - Atmel HLCDC Plane state structure.
 *
 * @base: DRM plane state
 * @crtc_x: x position of the plane relative to the CRTC
 * @crtc_y: y position of the plane relative to the CRTC
 * @crtc_w: visible width of the plane
 * @crtc_h: visible height of the plane
 * @src_x: x buffer position
 * @src_y: y buffer position
 * @src_w: buffer width
 * @src_h: buffer height
 * @disc_x: x discard position
 * @disc_y: y discard position
 * @disc_w: discard width
 * @disc_h: discard height
 * @ahb_id: AHB identification number
 * @bpp: bytes per pixel deduced from pixel_format
 * @offsets: offsets to apply to the GEM buffers
 * @xstride: value to add to the pixel pointer between each line
 * @pstride: value to add to the pixel pointer between each pixel
 * @nplanes: number of planes (deduced from pixel_format)
 * @dscrs: DMA descriptors
 */
struct atmel_hlcdc_plane_state {
        struct drm_plane_state base;
        int crtc_x;
        int crtc_y;
        unsigned int crtc_w;
        unsigned int crtc_h;
        uint32_t src_x;
        uint32_t src_y;
        uint32_t src_w;
        uint32_t src_h;

        int disc_x;
        int disc_y;
        int disc_w;
        int disc_h;

        int ahb_id;

        /* These fields are private and should not be touched */
        int bpp[ATMEL_HLCDC_LAYER_MAX_PLANES];
        unsigned int offsets[ATMEL_HLCDC_LAYER_MAX_PLANES];
        int xstride[ATMEL_HLCDC_LAYER_MAX_PLANES];
        int pstride[ATMEL_HLCDC_LAYER_MAX_PLANES];
        int nplanes;

        /* DMA descriptors. */
        struct atmel_hlcdc_dma_channel_dscr *dscrs[ATMEL_HLCDC_LAYER_MAX_PLANES];
};

static inline struct atmel_hlcdc_plane_state *
drm_plane_state_to_atmel_hlcdc_plane_state(struct drm_plane_state *s)
{
        return container_of(s, struct atmel_hlcdc_plane_state, base);
}

#define SUBPIXEL_MASK   0xffff

static uint32_t rgb_formats[] = {
        DRM_FORMAT_C8,
        DRM_FORMAT_XRGB4444,
        DRM_FORMAT_ARGB4444,
        DRM_FORMAT_RGBA4444,
        DRM_FORMAT_ARGB1555,
        DRM_FORMAT_RGB565,
        DRM_FORMAT_RGB888,
        DRM_FORMAT_XRGB8888,
        DRM_FORMAT_ARGB8888,
        DRM_FORMAT_RGBA8888,
};

struct atmel_hlcdc_formats atmel_hlcdc_plane_rgb_formats = {
        .formats = rgb_formats,
        .nformats = ARRAY_SIZE(rgb_formats),
};

static uint32_t rgb_and_yuv_formats[] = {
        DRM_FORMAT_C8,
        DRM_FORMAT_XRGB4444,
        DRM_FORMAT_ARGB4444,
        DRM_FORMAT_RGBA4444,
        DRM_FORMAT_ARGB1555,
        DRM_FORMAT_RGB565,
        DRM_FORMAT_RGB888,
        DRM_FORMAT_XRGB8888,
        DRM_FORMAT_ARGB8888,
        DRM_FORMAT_RGBA8888,
        DRM_FORMAT_AYUV,
        DRM_FORMAT_YUYV,
        DRM_FORMAT_UYVY,
        DRM_FORMAT_YVYU,
        DRM_FORMAT_VYUY,
        DRM_FORMAT_NV21,
        DRM_FORMAT_NV61,
        DRM_FORMAT_YUV422,
        DRM_FORMAT_YUV420,
};

struct atmel_hlcdc_formats atmel_hlcdc_plane_rgb_and_yuv_formats = {
        .formats = rgb_and_yuv_formats,
        .nformats = ARRAY_SIZE(rgb_and_yuv_formats),
};
static int atmel_hlcdc_format_to_plane_mode(u32 format, u32 *mode)
{
        switch (format) {
        case DRM_FORMAT_C8:
                *mode = ATMEL_HLCDC_C8_MODE;
                break;
        case DRM_FORMAT_XRGB4444:
                *mode = ATMEL_HLCDC_XRGB4444_MODE;
                break;
        case DRM_FORMAT_ARGB4444:
                *mode = ATMEL_HLCDC_ARGB4444_MODE;
                break;
        case DRM_FORMAT_RGBA4444:
                *mode = ATMEL_HLCDC_RGBA4444_MODE;
                break;
        case DRM_FORMAT_RGB565:
                *mode = ATMEL_HLCDC_RGB565_MODE;
                break;
        case DRM_FORMAT_RGB888:
                *mode = ATMEL_HLCDC_RGB888_MODE;
                break;
        case DRM_FORMAT_ARGB1555:
                *mode = ATMEL_HLCDC_ARGB1555_MODE;
                break;
        case DRM_FORMAT_XRGB8888:
                *mode = ATMEL_HLCDC_XRGB8888_MODE;
                break;
        case DRM_FORMAT_ARGB8888:
                *mode = ATMEL_HLCDC_ARGB8888_MODE;
                break;
        case DRM_FORMAT_RGBA8888:
                *mode = ATMEL_HLCDC_RGBA8888_MODE;
                break;
        case DRM_FORMAT_AYUV:
                *mode = ATMEL_HLCDC_AYUV_MODE;
                break;
        case DRM_FORMAT_YUYV:
                *mode = ATMEL_HLCDC_YUYV_MODE;
                break;
        case DRM_FORMAT_UYVY:
                *mode = ATMEL_HLCDC_UYVY_MODE;
                break;
        case DRM_FORMAT_YVYU:
                *mode = ATMEL_HLCDC_YVYU_MODE;
                break;
        case DRM_FORMAT_VYUY:
                *mode = ATMEL_HLCDC_VYUY_MODE;
                break;
        case DRM_FORMAT_NV21:
                *mode = ATMEL_HLCDC_NV21_MODE;
                break;
        case DRM_FORMAT_NV61:
                *mode = ATMEL_HLCDC_NV61_MODE;
                break;
        case DRM_FORMAT_YUV420:
                *mode = ATMEL_HLCDC_YUV420_MODE;
                break;
        case DRM_FORMAT_YUV422:
                *mode = ATMEL_HLCDC_YUV422_MODE;
                break;
        default:
                return -ENOTSUPP;
        }

        return 0;
}

static u32 heo_downscaling_xcoef[] = {
        0x11343311,
        0x000000f7,
        0x1635300c,
        0x000000f9,
        0x1b362c08,
        0x000000fb,
        0x1f372804,
        0x000000fe,
        0x24382400,
        0x00000000,
        0x28371ffe,
        0x00000004,
        0x2c361bfb,
        0x00000008,
        0x303516f9,
        0x0000000c,
};

static u32 heo_downscaling_ycoef[] = {
        0x00123737,
        0x00173732,
        0x001b382d,
        0x001f3928,
        0x00243824,
        0x0028391f,
        0x002d381b,
        0x00323717,
};

static u32 heo_upscaling_xcoef[] = {
        0xf74949f7,
        0x00000000,
        0xf55f33fb,
        0x000000fe,
        0xf5701efe,
        0x000000ff,
        0xf87c0dff,
        0x00000000,
        0x00800000,
        0x00000000,
        0x0d7cf800,
        0x000000ff,
        0x1e70f5ff,
        0x000000fe,
        0x335ff5fe,
        0x000000fb,
};

static u32 heo_upscaling_ycoef[] = {
        0x00004040,
        0x00075920,
        0x00056f0c,
        0x00027b03,
        0x00008000,
        0x00037b02,
        0x000c6f05,
        0x00205907,
};
#define ATMEL_HLCDC_XPHIDEF     4
#define ATMEL_HLCDC_YPHIDEF     4
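
/*
 * Compute the scaler factor for one direction of the polyphase (phi)
 * scaler. The source span is expressed in 1/8th-of-a-pixel phases with
 * 8 bits of extra precision (hence the 256 and 2048 constants), and the
 * initial phase @phidef is subtracted from it. If the resulting factor
 * would make the last output pixel read past the last source sample
 * (the max_memsize check), the factor is decremented by one.
 */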
static u32 atmel_hlcdc_plane_phiscaler_get_factor(u32 srcsize,
                                                  u32 dstsize,
                                                  u32 phidef)
{
        u32 factor, max_memsize;

        factor = (256 * ((8 * (srcsize - 1)) - phidef)) / (dstsize - 1);
        max_memsize = ((factor * (dstsize - 1)) + (256 * phidef)) / 2048;

        if (max_memsize > srcsize - 1)
                factor--;

        return factor;
}

static void
atmel_hlcdc_plane_scaler_set_phicoeff(struct atmel_hlcdc_plane *plane,
                                      const u32 *coeff_tab, int size,
                                      unsigned int cfg_offs)
{
        int i;

        for (i = 0; i < size; i++)
                atmel_hlcdc_layer_write_cfg(&plane->layer, cfg_offs + i,
                                            coeff_tab[i]);
}
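
/*
 * Configure the layer scaler: bypass it when the source and destination
 * sizes match; when the layer provides phi coefficient tables (HEO), load
 * the up- or downscaling table per direction and use the phi factor
 * computation above; otherwise fall back to simple 10-bit fixed-point
 * factors (1024 * src / dst).
 */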
static void atmel_hlcdc_plane_setup_scaler(struct atmel_hlcdc_plane *plane,
                                           struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        u32 xfactor, yfactor;

        if (!desc->layout.scaler_config)
                return;

        if (state->crtc_w == state->src_w && state->crtc_h == state->src_h) {
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.scaler_config, 0);
                return;
        }

        if (desc->layout.phicoeffs.x) {
                xfactor = atmel_hlcdc_plane_phiscaler_get_factor(state->src_w,
                                                        state->crtc_w,
                                                        ATMEL_HLCDC_XPHIDEF);

                yfactor = atmel_hlcdc_plane_phiscaler_get_factor(state->src_h,
                                                        state->crtc_h,
                                                        ATMEL_HLCDC_YPHIDEF);

                atmel_hlcdc_plane_scaler_set_phicoeff(plane,
                                state->crtc_w < state->src_w ?
                                heo_downscaling_xcoef :
                                heo_upscaling_xcoef,
                                ARRAY_SIZE(heo_upscaling_xcoef),
                                desc->layout.phicoeffs.x);

                atmel_hlcdc_plane_scaler_set_phicoeff(plane,
                                state->crtc_h < state->src_h ?
                                heo_downscaling_ycoef :
                                heo_upscaling_ycoef,
                                ARRAY_SIZE(heo_upscaling_ycoef),
                                desc->layout.phicoeffs.y);
        } else {
                xfactor = (1024 * state->src_w) / state->crtc_w;
                yfactor = (1024 * state->src_h) / state->crtc_h;
        }

        atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.scaler_config,
                                    ATMEL_HLCDC_LAYER_SCALER_ENABLE |
                                    ATMEL_HLCDC_LAYER_SCALER_FACTORS(xfactor,
                                                                     yfactor));
}

static void
atmel_hlcdc_plane_update_pos_and_size(struct atmel_hlcdc_plane *plane,
                                      struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;

        if (desc->layout.size)
                atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.size,
                                            ATMEL_HLCDC_LAYER_SIZE(state->crtc_w,
                                                                   state->crtc_h));

        if (desc->layout.memsize)
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.memsize,
                                            ATMEL_HLCDC_LAYER_SIZE(state->src_w,
                                                                   state->src_h));

        if (desc->layout.pos)
                atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.pos,
                                            ATMEL_HLCDC_LAYER_POS(state->crtc_x,
                                                                  state->crtc_y));

        atmel_hlcdc_plane_setup_scaler(plane, state);
}
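
/*
 * DMA and blending configuration. Overlay and cursor planes are blended
 * with the layers below them: per-pixel alpha (LAEN) is used when the
 * framebuffer format has an alpha channel, otherwise the global alpha
 * value from the plane "alpha" property is applied (GAEN/GA). The
 * discard-area feature is only enabled when a non-empty discard
 * rectangle has been computed.
 */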
static void
atmel_hlcdc_plane_update_general_settings(struct atmel_hlcdc_plane *plane,
                                          struct atmel_hlcdc_plane_state *state)
{
        unsigned int cfg = ATMEL_HLCDC_LAYER_DMA_BLEN_INCR16 | state->ahb_id;
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        const struct drm_format_info *format = state->base.fb->format;

        /*
         * Rotation optimization is not working on RGB888 (rotation is still
         * working but without any optimization).
         */
        if (format->format == DRM_FORMAT_RGB888)
                cfg |= ATMEL_HLCDC_LAYER_DMA_ROTDIS;

        atmel_hlcdc_layer_write_cfg(&plane->layer, ATMEL_HLCDC_LAYER_DMA_CFG,
                                    cfg);

        cfg = ATMEL_HLCDC_LAYER_DMA | ATMEL_HLCDC_LAYER_REP;

        if (plane->base.type != DRM_PLANE_TYPE_PRIMARY) {
                cfg |= ATMEL_HLCDC_LAYER_OVR | ATMEL_HLCDC_LAYER_ITER2BL |
                       ATMEL_HLCDC_LAYER_ITER;

                if (format->has_alpha)
                        cfg |= ATMEL_HLCDC_LAYER_LAEN;
                else
                        cfg |= ATMEL_HLCDC_LAYER_GAEN |
                               ATMEL_HLCDC_LAYER_GA(state->base.alpha);
        }

        if (state->disc_h && state->disc_w)
                cfg |= ATMEL_HLCDC_LAYER_DISCEN;

        atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.general_config,
                                    cfg);
}

static void atmel_hlcdc_plane_update_format(struct atmel_hlcdc_plane *plane,
                                            struct atmel_hlcdc_plane_state *state)
{
        u32 cfg;
        int ret;

        ret = atmel_hlcdc_format_to_plane_mode(state->base.fb->format->format,
                                               &cfg);
        if (ret)
                return;

        if ((state->base.fb->format->format == DRM_FORMAT_YUV422 ||
             state->base.fb->format->format == DRM_FORMAT_NV61) &&
            drm_rotation_90_or_270(state->base.rotation))
                cfg |= ATMEL_HLCDC_YUV422ROT;

        atmel_hlcdc_layer_write_cfg(&plane->layer,
                                    ATMEL_HLCDC_LAYER_FORMAT_CFG, cfg);
}
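
/*
 * Reload the hardware CLUT from the CRTC gamma LUT when it changed.
 * DRM LUT entries are 16 bits per channel, so each component is reduced
 * to its 8 most significant bits and packed as RGB888.
 */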
static void atmel_hlcdc_plane_update_clut(struct atmel_hlcdc_plane *plane,
                                          struct atmel_hlcdc_plane_state *state)
{
        struct drm_crtc *crtc = state->base.crtc;
        struct drm_color_lut *lut;
        int idx;

        if (!crtc || !crtc->state)
                return;

        if (!crtc->state->color_mgmt_changed || !crtc->state->gamma_lut)
                return;

        lut = (struct drm_color_lut *)crtc->state->gamma_lut->data;

        for (idx = 0; idx < ATMEL_HLCDC_CLUT_SIZE; idx++, lut++) {
                u32 val = ((lut->red << 8) & 0xff0000) |
                          (lut->green & 0xff00) |
                          (lut->blue >> 8);

                atmel_hlcdc_layer_write_clut(&plane->layer, idx, val);
        }
}
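
/*
 * Point the layer DMA at the new framebuffer: one descriptor per
 * framebuffer plane is updated and queued through the PLANE_HEAD
 * register. When the channel is not enabled yet, the address, control
 * and next-descriptor registers are programmed directly as well.
 */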
static void atmel_hlcdc_plane_update_buffers(struct atmel_hlcdc_plane *plane,
                                             struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        struct drm_framebuffer *fb = state->base.fb;
        u32 sr;
        int i;

        sr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHSR);

        for (i = 0; i < state->nplanes; i++) {
                struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, i);

                state->dscrs[i]->addr = gem->paddr + state->offsets[i];

                atmel_hlcdc_layer_write_reg(&plane->layer,
                                            ATMEL_HLCDC_LAYER_PLANE_HEAD(i),
                                            state->dscrs[i]->self);

                if (!(sr & ATMEL_HLCDC_LAYER_EN)) {
                        atmel_hlcdc_layer_write_reg(&plane->layer,
                                        ATMEL_HLCDC_LAYER_PLANE_ADDR(i),
                                        state->dscrs[i]->addr);
                        atmel_hlcdc_layer_write_reg(&plane->layer,
                                        ATMEL_HLCDC_LAYER_PLANE_CTRL(i),
                                        state->dscrs[i]->ctrl);
                        atmel_hlcdc_layer_write_reg(&plane->layer,
                                        ATMEL_HLCDC_LAYER_PLANE_NEXT(i),
                                        state->dscrs[i]->self);
                }

                if (desc->layout.xstride[i])
                        atmel_hlcdc_layer_write_cfg(&plane->layer,
                                                    desc->layout.xstride[i],
                                                    state->xstride[i]);

                if (desc->layout.pstride[i])
                        atmel_hlcdc_layer_write_cfg(&plane->layer,
                                                    desc->layout.pstride[i],
                                                    state->pstride[i]);
        }
}
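
/*
 * Balance the planes between the two AHB master interfaces of the
 * controller: each plane's memory load is estimated from its fetched
 * pixels (source size minus the discarded area) times bytes per pixel,
 * and the plane is routed to whichever bus currently carries the
 * lighter load.
 */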
int atmel_hlcdc_plane_prepare_ahb_routing(struct drm_crtc_state *c_state)
{
        unsigned int ahb_load[2] = { };
        struct drm_plane *plane;

        drm_atomic_crtc_state_for_each_plane(plane, c_state) {
                struct atmel_hlcdc_plane_state *plane_state;
                struct drm_plane_state *plane_s;
                unsigned int pixels, load = 0;
                int i;

                plane_s = drm_atomic_get_plane_state(c_state->state, plane);
                if (IS_ERR(plane_s))
                        return PTR_ERR(plane_s);

                plane_state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(plane_s);

                pixels = (plane_state->src_w * plane_state->src_h) -
                         (plane_state->disc_w * plane_state->disc_h);

                for (i = 0; i < plane_state->nplanes; i++)
                        load += pixels * plane_state->bpp[i];

                if (ahb_load[0] <= ahb_load[1])
                        plane_state->ahb_id = 0;
                else
                        plane_state->ahb_id = 1;

                ahb_load[plane_state->ahb_id] += load;
        }

        return 0;
}
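
/*
 * Look for the largest fully opaque overlay and program the area it
 * covers as the primary plane discard area: the primary layer can then
 * skip fetching the pixels hidden behind it, saving memory bandwidth.
 */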
int atmel_hlcdc_plane_prepare_disc_area(struct drm_crtc_state *c_state)
{
        int disc_x = 0, disc_y = 0, disc_w = 0, disc_h = 0;
        const struct atmel_hlcdc_layer_cfg_layout *layout;
        struct atmel_hlcdc_plane_state *primary_state;
        struct drm_plane_state *primary_s;
        struct atmel_hlcdc_plane *primary;
        struct drm_plane *ovl;

        primary = drm_plane_to_atmel_hlcdc_plane(c_state->crtc->primary);
        layout = &primary->layer.desc->layout;
        if (!layout->disc_pos || !layout->disc_size)
                return 0;

        primary_s = drm_atomic_get_plane_state(c_state->state,
                                               &primary->base);
        if (IS_ERR(primary_s))
                return PTR_ERR(primary_s);

        primary_state = drm_plane_state_to_atmel_hlcdc_plane_state(primary_s);

        drm_atomic_crtc_state_for_each_plane(ovl, c_state) {
                struct atmel_hlcdc_plane_state *ovl_state;
                struct drm_plane_state *ovl_s;

                if (ovl == c_state->crtc->primary)
                        continue;

                ovl_s = drm_atomic_get_plane_state(c_state->state, ovl);
                if (IS_ERR(ovl_s))
                        return PTR_ERR(ovl_s);

                ovl_state = drm_plane_state_to_atmel_hlcdc_plane_state(ovl_s);

                if (!ovl_s->visible ||
                    !ovl_s->fb ||
                    ovl_s->fb->format->has_alpha ||
                    ovl_s->alpha != DRM_BLEND_ALPHA_OPAQUE)
                        continue;

                /* TODO: implement a smarter hidden area detection */
                if (ovl_state->crtc_h * ovl_state->crtc_w < disc_h * disc_w)
                        continue;

                disc_x = ovl_state->crtc_x;
                disc_y = ovl_state->crtc_y;
                disc_h = ovl_state->crtc_h;
                disc_w = ovl_state->crtc_w;
        }

        primary_state->disc_x = disc_x;
        primary_state->disc_y = disc_y;
        primary_state->disc_w = disc_w;
        primary_state->disc_h = disc_h;

        return 0;
}

static void
atmel_hlcdc_plane_update_disc_area(struct atmel_hlcdc_plane *plane,
                                   struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_cfg_layout *layout;

        layout = &plane->layer.desc->layout;
        if (!layout->disc_pos || !layout->disc_size)
                return;

        atmel_hlcdc_layer_write_cfg(&plane->layer, layout->disc_pos,
                                    ATMEL_HLCDC_LAYER_DISC_POS(state->disc_x,
                                                               state->disc_y));

        atmel_hlcdc_layer_write_cfg(&plane->layer, layout->disc_size,
                                    ATMEL_HLCDC_LAYER_DISC_SIZE(state->disc_w,
                                                                state->disc_h));
}
static int atmel_hlcdc_plane_atomic_check(struct drm_plane *p,
                                          struct drm_plane_state *s)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(s);
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        struct drm_framebuffer *fb = state->base.fb;
        const struct drm_display_mode *mode;
        struct drm_crtc_state *crtc_state;
        int ret;
        int i;

        if (!state->base.crtc || WARN_ON(!fb))
                return 0;

        crtc_state = drm_atomic_get_existing_crtc_state(s->state, s->crtc);
        mode = &crtc_state->adjusted_mode;

        ret = drm_atomic_helper_check_plane_state(s, crtc_state,
                                                  (1 << 16) / 2048,
                                                  INT_MAX, true, true);
        if (ret || !s->visible)
                return ret;

        state->src_x = s->src.x1;
        state->src_y = s->src.y1;
        state->src_w = drm_rect_width(&s->src);
        state->src_h = drm_rect_height(&s->src);
        state->crtc_x = s->dst.x1;
        state->crtc_y = s->dst.y1;
        state->crtc_w = drm_rect_width(&s->dst);
        state->crtc_h = drm_rect_height(&s->dst);

        if ((state->src_x | state->src_y | state->src_w | state->src_h) &
            SUBPIXEL_MASK)
                return -EINVAL;

        state->src_x >>= 16;
        state->src_y >>= 16;
        state->src_w >>= 16;
        state->src_h >>= 16;

        state->nplanes = fb->format->num_planes;
        if (state->nplanes > ATMEL_HLCDC_LAYER_MAX_PLANES)
                return -EINVAL;

        for (i = 0; i < state->nplanes; i++) {
                unsigned int offset = 0;
                int xdiv = i ? fb->format->hsub : 1;
                int ydiv = i ? fb->format->vsub : 1;

                state->bpp[i] = fb->format->cpp[i];
                if (!state->bpp[i])
                        return -EINVAL;
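
                /*
                 * Compute the DMA start offset and the strides for the
                 * requested rotation: xstride is added to the pixel
                 * pointer at the end of each line and pstride between
                 * two consecutive pixels. Negative strides make the DMA
                 * walk the buffer backwards, which is how the 90/180/270
                 * degree rotations are implemented.
                 */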
                switch (state->base.rotation & DRM_MODE_ROTATE_MASK) {
                case DRM_MODE_ROTATE_90:
                        offset = (state->src_y / ydiv) *
                                 fb->pitches[i];
                        offset += ((state->src_x + state->src_w - 1) /
                                   xdiv) * state->bpp[i];
                        state->xstride[i] = -(((state->src_h - 1) / ydiv) *
                                            fb->pitches[i]) -
                                          (2 * state->bpp[i]);
                        state->pstride[i] = fb->pitches[i] - state->bpp[i];
                        break;
                case DRM_MODE_ROTATE_180:
                        offset = ((state->src_y + state->src_h - 1) /
                                  ydiv) * fb->pitches[i];
                        offset += ((state->src_x + state->src_w - 1) /
                                   xdiv) * state->bpp[i];
                        state->xstride[i] = ((((state->src_w - 1) / xdiv) - 1) *
                                           state->bpp[i]) - fb->pitches[i];
                        state->pstride[i] = -2 * state->bpp[i];
                        break;
                case DRM_MODE_ROTATE_270:
                        offset = ((state->src_y + state->src_h - 1) /
                                  ydiv) * fb->pitches[i];
                        offset += (state->src_x / xdiv) * state->bpp[i];
                        state->xstride[i] = ((state->src_h - 1) / ydiv) *
                                          fb->pitches[i];
                        state->pstride[i] = -fb->pitches[i] - state->bpp[i];
                        break;
                case DRM_MODE_ROTATE_0:
                default:
                        offset = (state->src_y / ydiv) * fb->pitches[i];
                        offset += (state->src_x / xdiv) * state->bpp[i];
                        state->xstride[i] = fb->pitches[i] -
                                          ((state->src_w / xdiv) *
                                           state->bpp[i]);
                        state->pstride[i] = 0;
                        break;
                }

                state->offsets[i] = offset + fb->offsets[i];
        }

        /*
         * Swap width and size in case of 90 or 270 degrees rotation
         */
        if (drm_rotation_90_or_270(state->base.rotation)) {
                swap(state->src_w, state->src_h);
        }

        if (!desc->layout.size &&
            (mode->hdisplay != state->crtc_w ||
             mode->vdisplay != state->crtc_h))
                return -EINVAL;

        if ((state->crtc_h != state->src_h || state->crtc_w != state->src_w) &&
            (!desc->layout.memsize ||
             state->base.fb->format->has_alpha))
                return -EINVAL;

        return 0;
}
static void atmel_hlcdc_plane_atomic_disable(struct drm_plane *p,
                                             struct drm_plane_state *old_state)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);

        /* Disable interrupts */
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_IDR,
                                    0xffffffff);

        /* Disable the layer */
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHDR,
                                    ATMEL_HLCDC_LAYER_RST |
                                    ATMEL_HLCDC_LAYER_A2Q |
                                    ATMEL_HLCDC_LAYER_UPDATE);

        /* Clear all pending interrupts */
        atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);
}

static void atmel_hlcdc_plane_atomic_update(struct drm_plane *p,
                                            struct drm_plane_state *old_s)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(p->state);
        u32 sr;

        if (!p->state->crtc || !p->state->fb)
                return;

        if (!state->base.visible) {
                atmel_hlcdc_plane_atomic_disable(p, old_s);
                return;
        }

        atmel_hlcdc_plane_update_pos_and_size(plane, state);
        atmel_hlcdc_plane_update_general_settings(plane, state);
        atmel_hlcdc_plane_update_format(plane, state);
        atmel_hlcdc_plane_update_clut(plane, state);
        atmel_hlcdc_plane_update_buffers(plane, state);
        atmel_hlcdc_plane_update_disc_area(plane, state);

        /* Enable the overrun interrupts. */
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_IER,
                                    ATMEL_HLCDC_LAYER_OVR_IRQ(0) |
                                    ATMEL_HLCDC_LAYER_OVR_IRQ(1) |
                                    ATMEL_HLCDC_LAYER_OVR_IRQ(2));

        /* Apply the new config at the next SOF event. */
        sr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHSR);
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHER,
                                    ATMEL_HLCDC_LAYER_UPDATE |
                                    (sr & ATMEL_HLCDC_LAYER_EN ?
                                     ATMEL_HLCDC_LAYER_A2Q : ATMEL_HLCDC_LAYER_EN));
}
static int atmel_hlcdc_plane_init_properties(struct atmel_hlcdc_plane *plane)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;

        if (desc->type == ATMEL_HLCDC_OVERLAY_LAYER ||
            desc->type == ATMEL_HLCDC_CURSOR_LAYER) {
                int ret;

                ret = drm_plane_create_alpha_property(&plane->base);
                if (ret)
                        return ret;
        }

        if (desc->layout.xstride[0] && desc->layout.pstride[0]) {
                int ret;

                ret = drm_plane_create_rotation_property(&plane->base,
                                                         DRM_MODE_ROTATE_0,
                                                         DRM_MODE_ROTATE_0 |
                                                         DRM_MODE_ROTATE_90 |
                                                         DRM_MODE_ROTATE_180 |
                                                         DRM_MODE_ROTATE_270);
                if (ret)
                        return ret;
        }

        if (desc->layout.csc) {
                /*
                 * TODO: declare a "yuv-to-rgb-conv-factors" property to let
                 * userspace modify these factors (using a BLOB property?).
                 */
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.csc,
                                            0x4c900091);
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.csc + 1,
                                            0x7a5f5090);
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.csc + 2,
                                            0x40040890);
        }

        return 0;
}
void atmel_hlcdc_plane_irq(struct atmel_hlcdc_plane *plane)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        u32 isr;

        isr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);

        /*
         * There's not much we can do in case of overrun except informing
         * the user. However, we are in interrupt context here, hence the
         * use of dev_dbg().
         */
        if (isr &
            (ATMEL_HLCDC_LAYER_OVR_IRQ(0) | ATMEL_HLCDC_LAYER_OVR_IRQ(1) |
             ATMEL_HLCDC_LAYER_OVR_IRQ(2)))
                dev_dbg(plane->base.dev->dev, "overrun on plane %s\n",
                        desc->name);
}

static const struct drm_plane_helper_funcs atmel_hlcdc_layer_plane_helper_funcs = {
        .atomic_check = atmel_hlcdc_plane_atomic_check,
        .atomic_update = atmel_hlcdc_plane_atomic_update,
        .atomic_disable = atmel_hlcdc_plane_atomic_disable,
};
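
/*
 * Allocate one DMA channel descriptor per possible framebuffer plane from
 * the device dma_pool. Each descriptor initially points to itself as its
 * next descriptor, so the channel keeps looping on the same buffer until
 * a new descriptor is queued.
 */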
static int atmel_hlcdc_plane_alloc_dscrs(struct drm_plane *p,
                                         struct atmel_hlcdc_plane_state *state)
{
        struct atmel_hlcdc_dc *dc = p->dev->dev_private;
        int i;

        for (i = 0; i < ARRAY_SIZE(state->dscrs); i++) {
                struct atmel_hlcdc_dma_channel_dscr *dscr;
                dma_addr_t dscr_dma;

                dscr = dma_pool_alloc(dc->dscrpool, GFP_KERNEL, &dscr_dma);
                if (!dscr)
                        goto err;

                dscr->addr = 0;
                dscr->next = dscr_dma;
                dscr->self = dscr_dma;
                dscr->ctrl = ATMEL_HLCDC_LAYER_DFETCH;

                state->dscrs[i] = dscr;
        }

        return 0;

err:
        for (i--; i >= 0; i--) {
                dma_pool_free(dc->dscrpool, state->dscrs[i],
                              state->dscrs[i]->self);
        }

        return -ENOMEM;
}
static void atmel_hlcdc_plane_reset(struct drm_plane *p)
{
        struct atmel_hlcdc_plane_state *state;

        if (p->state) {
                state = drm_plane_state_to_atmel_hlcdc_plane_state(p->state);

                if (state->base.fb)
                        drm_framebuffer_put(state->base.fb);

                kfree(state);
                p->state = NULL;
        }

        state = kzalloc(sizeof(*state), GFP_KERNEL);
        if (state) {
                if (atmel_hlcdc_plane_alloc_dscrs(p, state)) {
                        kfree(state);
                        dev_err(p->dev->dev,
                                "Failed to allocate initial plane state\n");
                        return;
                }
                __drm_atomic_helper_plane_reset(p, &state->base);
        }
}

static struct drm_plane_state *
atmel_hlcdc_plane_atomic_duplicate_state(struct drm_plane *p)
{
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(p->state);
        struct atmel_hlcdc_plane_state *copy;

        copy = kmemdup(state, sizeof(*state), GFP_KERNEL);
        if (!copy)
                return NULL;

        if (atmel_hlcdc_plane_alloc_dscrs(p, copy)) {
                kfree(copy);
                return NULL;
        }

        if (copy->base.fb)
                drm_framebuffer_get(copy->base.fb);

        return &copy->base;
}

static void atmel_hlcdc_plane_atomic_destroy_state(struct drm_plane *p,
                                                   struct drm_plane_state *s)
{
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(s);
        struct atmel_hlcdc_dc *dc = p->dev->dev_private;
        int i;

        for (i = 0; i < ARRAY_SIZE(state->dscrs); i++) {
                dma_pool_free(dc->dscrpool, state->dscrs[i],
                              state->dscrs[i]->self);
        }

        if (s->fb)
                drm_framebuffer_put(s->fb);

        kfree(state);
}

static const struct drm_plane_funcs layer_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = drm_plane_cleanup,
        .reset = atmel_hlcdc_plane_reset,
        .atomic_duplicate_state = atmel_hlcdc_plane_atomic_duplicate_state,
        .atomic_destroy_state = atmel_hlcdc_plane_atomic_destroy_state,
};
static int atmel_hlcdc_plane_create(struct drm_device *dev,
                                    const struct atmel_hlcdc_layer_desc *desc)
{
        struct atmel_hlcdc_dc *dc = dev->dev_private;
        struct atmel_hlcdc_plane *plane;
        enum drm_plane_type type;
        int ret;

        plane = devm_kzalloc(dev->dev, sizeof(*plane), GFP_KERNEL);
        if (!plane)
                return -ENOMEM;

        atmel_hlcdc_layer_init(&plane->layer, desc, dc->hlcdc->regmap);

        if (desc->type == ATMEL_HLCDC_BASE_LAYER)
                type = DRM_PLANE_TYPE_PRIMARY;
        else if (desc->type == ATMEL_HLCDC_CURSOR_LAYER)
                type = DRM_PLANE_TYPE_CURSOR;
        else
                type = DRM_PLANE_TYPE_OVERLAY;

        ret = drm_universal_plane_init(dev, &plane->base, 0,
                                       &layer_plane_funcs,
                                       desc->formats->formats,
                                       desc->formats->nformats,
                                       NULL, type, NULL);
        if (ret)
                return ret;

        drm_plane_helper_add(&plane->base,
                             &atmel_hlcdc_layer_plane_helper_funcs);

        /* Set default property values */
        ret = atmel_hlcdc_plane_init_properties(plane);
        if (ret)
                return ret;

        dc->layers[desc->id] = &plane->layer;

        return 0;
}

int atmel_hlcdc_create_planes(struct drm_device *dev)
{
        struct atmel_hlcdc_dc *dc = dev->dev_private;
        const struct atmel_hlcdc_layer_desc *descs = dc->desc->layers;
        int nlayers = dc->desc->nlayers;
        int i, ret;

        dc->dscrpool = dmam_pool_create("atmel-hlcdc-dscr", dev->dev,
                                sizeof(struct atmel_hlcdc_dma_channel_dscr),
                                sizeof(u64), 0);
        if (!dc->dscrpool)
                return -ENOMEM;

        for (i = 0; i < nlayers; i++) {
                if (descs[i].type != ATMEL_HLCDC_BASE_LAYER &&
                    descs[i].type != ATMEL_HLCDC_OVERLAY_LAYER &&
                    descs[i].type != ATMEL_HLCDC_CURSOR_LAYER)
                        continue;

                ret = atmel_hlcdc_plane_create(dev, &descs[i]);
                if (ret)
                        return ret;
        }

        return 0;
}