/*
 * Copyright 2018 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "wndw.h"
#include "wimm.h"
#include "handles.h"

#include <nvif/class.h>
#include <nvif/cl0002.h>

#include <nvhw/class/cl507c.h>
#include <nvhw/class/cl507e.h>
#include <nvhw/class/clc37e.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>

#include "nouveau_bo.h"
#include "nouveau_gem.h"

static void
nv50_wndw_ctxdma_del(struct nv50_wndw_ctxdma *ctxdma)
{
	nvif_object_dtor(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
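
/* Look up an existing DMA context object for this framebuffer's memory kind,
 * or create one describing the VRAM aperture in the layout expected by the
 * detected chipset generation.
 */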
static struct nv50_wndw_ctxdma *
nv50_wndw_ctxdma_new(struct nv50_wndw *wndw, struct drm_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->dev);
	struct nv50_wndw_ctxdma *ctxdma;
	u32 handle;
	u32 unused;
	u8  kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	nouveau_framebuffer_get_layout(fb, &unused, &kind);
	handle = NV50_DISP_HANDLE_WNDW_CTX(kind);

	list_for_each_entry(ctxdma, &wndw->ctxdma.list, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &wndw->ctxdma.list);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_ctor(wndw->ctxdma.parent, "kmsFbCtxDma", handle,
			       NV_DMA_IN_MEMORY, &args, argc, &ctxdma->object);
	if (ret) {
		nv50_wndw_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
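
/* If a notifier was armed for the last update, wait for the hardware to
 * signal that the update has begun before returning.
 */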
int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (asyw->set.ntfy) {
		return wndw->func->ntfy_wait_begun(disp->sync,
						   asyw->ntfy.offset,
						   wndw->wndw.base.device);
	}
	return 0;
}
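
/* Push the "clear" half of the atomic state: disable semaphore, notifier,
 * LUT, CSC and image resources the new state no longer uses, unless they
 * are about to be replaced by a matching "set".
 */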
void
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 *interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	union nv50_wndw_atom_mask clr = {
		.mask = asyw->clr.mask & ~(flush ? 0 : asyw->set.mask),
	};
	if (clr.sema ) wndw->func-> sema_clr(wndw);
	if (clr.ntfy ) wndw->func-> ntfy_clr(wndw);
	if (clr.xlut ) wndw->func-> xlut_clr(wndw);
	if (clr.csc  ) wndw->func->  csc_clr(wndw);
	if (clr.image) wndw->func->image_clr(wndw);

	interlock[wndw->interlock.type] |= wndw->interlock.data;
}
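
/* Push the "set" half of the atomic state, and record this window in the
 * interlock masks so the update is synchronised with the core and
 * window-immediate channels as required.
 */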
void
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 *interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
		asyw->image.mode = NV507C_SET_PRESENT_CONTROL_BEGIN_MODE_NON_TEARING;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);

	if (asyw->set.xlut ) {
		if (asyw->ilut) {
			asyw->xlut.i.offset =
				nv50_lut_load(&wndw->ilut, asyw->xlut.i.buffer,
					      asyw->ilut, asyw->xlut.i.load);
		}
		wndw->func->xlut_set(wndw, asyw);
	}

	if (asyw->set.csc  ) wndw->func->csc_set  (wndw, asyw);
	if (asyw->set.scale) wndw->func->scale_set(wndw, asyw);
	if (asyw->set.blend) wndw->func->blend_set(wndw, asyw);
	if (asyw->set.point) {
		if (asyw->set.point = false, asyw->set.mask)
			interlock[wndw->interlock.type] |= wndw->interlock.data;
		interlock[NV50_DISP_INTERLOCK_WIMM] |= wndw->interlock.wimm;

		wndw->immd->point(wndw, asyw);
		wndw->immd->update(wndw, interlock);
	} else {
		interlock[wndw->interlock.type] |= wndw->interlock.data;
	}
}
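
/* Arm a completion notifier for the next update submitted on this window. */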
void
nv50_wndw_ntfy_enable(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);

	asyw->ntfy.handle = wndw->wndw.sync.handle;
	asyw->ntfy.offset = wndw->ntfy;
	asyw->ntfy.awaken = false;
	asyw->set.ntfy = true;

	wndw->func->ntfy_reset(disp->sync, wndw->ntfy);
}
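
/* Window is being disabled or moved away from its head: release it and drop
 * the per-window object handles from the new state.
 */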
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
	asyw->xlut.handle = 0;
	memset(asyw->image.handle, 0x00, sizeof(asyw->image.handle));
}

static int
nv50_wndw_atomic_check_acquire_yuv(struct nv50_wndw_atom *asyw)
{
	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_YUYV:
		asyw->image.format = NV507E_SURFACE_SET_PARAMS_FORMAT_VE8YO8UE8YE8;
		break;
	case DRM_FORMAT_UYVY:
		asyw->image.format = NV507E_SURFACE_SET_PARAMS_FORMAT_YO8VE8YE8UE8;
		break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->image.colorspace = NV507E_SURFACE_SET_PARAMS_COLOR_SPACE_YUV_601;
	return 0;
}

static int
nv50_wndw_atomic_check_acquire_rgb(struct nv50_wndw_atom *asyw)
{
	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_C8:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_I8;
		break;
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A8R8G8B8;
		break;
	case DRM_FORMAT_RGB565:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_R5G6B5;
		break;
	case DRM_FORMAT_XRGB1555:
	case DRM_FORMAT_ARGB1555:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A1R5G5B5;
		break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A2B10G10R10;
		break;
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_ABGR8888:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A8B8G8R8;
		break;
	case DRM_FORMAT_XRGB2101010:
	case DRM_FORMAT_ARGB2101010:
		asyw->image.format = NVC37E_SET_PARAMS_FORMAT_A2R10G10B10;
		break;
	case DRM_FORMAT_XBGR16161616F:
	case DRM_FORMAT_ABGR16161616F:
		asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_RF16_GF16_BF16_AF16;
		break;
	default:
		return -EINVAL;
	}

	asyw->image.colorspace = NV507E_SURFACE_SET_PARAMS_COLOR_SPACE_RGB;
	return 0;
}

static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw, bool modeset,
			       struct nv50_wndw_atom *armw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct drm_framebuffer *fb = asyw->state.fb;
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	uint32_t tile_mode;
	uint8_t kind;
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);

	if (fb != armw->state.fb || !armw->visible || modeset) {
		nouveau_framebuffer_get_layout(fb, &tile_mode, &kind);

		asyw->image.w = fb->width;
		asyw->image.h = fb->height;
		asyw->image.kind = kind;

		ret = nv50_wndw_atomic_check_acquire_rgb(asyw);
		if (ret) {
			ret = nv50_wndw_atomic_check_acquire_yuv(asyw);
			if (ret)
				return ret;
		}

		if (asyw->image.kind) {
			asyw->image.layout = NV507C_SURFACE_SET_STORAGE_MEMORY_LAYOUT_BLOCKLINEAR;
			if (drm->client.device.info.chipset >= 0xc0)
				asyw->image.blockh = tile_mode >> 4;
			else
				asyw->image.blockh = tile_mode;
			asyw->image.blocks[0] = fb->pitches[0] / 64;
			asyw->image.pitch[0] = 0;
		} else {
			asyw->image.layout = NV507C_SURFACE_SET_STORAGE_MEMORY_LAYOUT_PITCH;
			asyw->image.blockh = NV507C_SURFACE_SET_STORAGE_BLOCK_HEIGHT_ONE_GOB;
			asyw->image.blocks[0] = 0;
			asyw->image.pitch[0] = fb->pitches[0];
		}

		if (!asyh->state.async_flip)
			asyw->image.interval = 1;
		else
			asyw->image.interval = 0;

		if (asyw->image.interval)
			asyw->image.mode = NV507C_SET_PRESENT_CONTROL_BEGIN_MODE_NON_TEARING;
		else
			asyw->image.mode = NV507C_SET_PRESENT_CONTROL_BEGIN_MODE_IMMEDIATE;

		asyw->set.image = wndw->func->image_set != NULL;
	}

	if (wndw->func->scale_set) {
		asyw->scale.sx = asyw->state.src_x >> 16;
		asyw->scale.sy = asyw->state.src_y >> 16;
		asyw->scale.sw = asyw->state.src_w >> 16;
		asyw->scale.sh = asyw->state.src_h >> 16;
		asyw->scale.dw = asyw->state.crtc_w;
		asyw->scale.dh = asyw->state.crtc_h;
		if (memcmp(&armw->scale, &asyw->scale, sizeof(asyw->scale)))
			asyw->set.scale = true;
	}

	if (wndw->func->blend_set) {
		asyw->blend.depth = 255 - asyw->state.normalized_zpos;
		asyw->blend.k1 = asyw->state.alpha >> 8;
		switch (asyw->state.pixel_blend_mode) {
		case DRM_MODE_BLEND_PREMULTI:
			asyw->blend.src_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_SRC_COLOR_FACTOR_MATCH_SELECT_K1;
			asyw->blend.dst_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_DST_COLOR_FACTOR_MATCH_SELECT_NEG_K1_TIMES_SRC;
			break;
		case DRM_MODE_BLEND_COVERAGE:
			asyw->blend.src_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_SRC_COLOR_FACTOR_MATCH_SELECT_K1_TIMES_SRC;
			asyw->blend.dst_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_DST_COLOR_FACTOR_MATCH_SELECT_NEG_K1_TIMES_SRC;
			break;
		case DRM_MODE_BLEND_PIXEL_NONE:
		default:
			asyw->blend.src_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_SRC_COLOR_FACTOR_MATCH_SELECT_K1;
			asyw->blend.dst_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_DST_COLOR_FACTOR_MATCH_SELECT_NEG_K1;
			break;
		}
		if (memcmp(&armw->blend, &asyw->blend, sizeof(asyw->blend)))
			asyw->set.blend = true;
	}

	if (wndw->immd) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;
	}

	return wndw->func->acquire(wndw, asyw, asyh);
}
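
/* Work out the input (de-gamma) LUT and CSC state for this window, including
 * the legacy-gamma and C8/I8 special cases described below.
 */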
static int
nv50_wndw_atomic_check_lut(struct nv50_wndw *wndw,
			   struct nv50_wndw_atom *armw,
			   struct nv50_wndw_atom *asyw,
			   struct nv50_head_atom *asyh)
{
	struct drm_property_blob *ilut = asyh->state.degamma_lut;

	/* I8 format without an input LUT makes no sense, and the
	 * HW error-checks for this.
	 *
	 * In order to handle legacy gamma, when there's no input
	 * LUT we need to steal the output LUT and use it instead.
	 */
	if (!ilut && asyw->state.fb->format->format == DRM_FORMAT_C8) {
		/* This should be an error, but there's legacy clients
		 * that do a modeset before providing a gamma table.
		 *
		 * We keep the window disabled to avoid angering HW.
		 */
		if (!(ilut = asyh->state.gamma_lut)) {
			asyw->visible = false;
			return 0;
		}

		if (wndw->func->ilut)
			asyh->wndw.olut |= BIT(wndw->id);
	} else {
		asyh->wndw.olut &= ~BIT(wndw->id);
	}

	if (!ilut && wndw->func->ilut_identity &&
	    asyw->state.fb->format->format != DRM_FORMAT_XBGR16161616F &&
	    asyw->state.fb->format->format != DRM_FORMAT_ABGR16161616F) {
		static struct drm_property_blob dummy = {};
		ilut = &dummy;
	}

	/* Recalculate LUT state. */
	memset(&asyw->xlut, 0x00, sizeof(asyw->xlut));
	if ((asyw->ilut = wndw->func->ilut ? ilut : NULL)) {
		if (!wndw->func->ilut(wndw, asyw, drm_color_lut_size(ilut))) {
			DRM_DEBUG_KMS("Invalid ilut\n");
			return -EINVAL;
		}
		asyw->xlut.handle = wndw->wndw.vram.handle;
		asyw->xlut.i.buffer = !asyw->xlut.i.buffer;
		asyw->set.xlut = true;
	} else {
		asyw->clr.xlut = armw->xlut.handle != 0;
	}

	/* Handle setting base SET_OUTPUT_LUT_LO_ENABLE_USE_CORE_LUT. */
	if (wndw->func->olut_core &&
	    (!armw->visible || (armw->xlut.handle && !asyw->xlut.handle)))
		asyw->set.xlut = true;

	if (wndw->func->csc && asyh->state.ctm) {
		const struct drm_color_ctm *ctm = asyh->state.ctm->data;
		wndw->func->csc(wndw, asyw, ctm);
		asyw->csc.valid = true;
		asyw->set.csc = true;
	} else {
		asyw->csc.valid = false;
		asyw->clr.csc = armw->csc.valid;
	}

	/* Can't do an immediate flip while changing the LUT. */
	asyh->state.async_flip = false;

	return 0;
}
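
/* Top-level plane atomic_check: determine visibility, validate LUT and image
 * state, and work out which window methods must be set or cleared.
 */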
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool modeset = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);

	/* Fetch the assembly state for the head the window will belong to,
	 * and determine whether the window will be visible.
	 */
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		modeset = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyw->visible = asyh->state.active;
	} else {
		asyw->visible = false;
	}

	/* Fetch assembly state for the head the window used to belong to. */
	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
	}

	/* LUT configuration can potentially cause the window to be disabled. */
	if (asyw->visible && wndw->func->xlut_set &&
	    (!armw->visible ||
	     asyh->state.color_mgmt_changed ||
	     asyw->state.fb->format->format !=
	     armw->state.fb->format->format)) {
		ret = nv50_wndw_atomic_check_lut(wndw, armw, asyw, asyh);
		if (ret)
			return ret;
	}

	/* Calculate new window state. */
	if (asyw->visible) {
		ret = nv50_wndw_atomic_check_acquire(wndw, modeset,
						     armw, asyw, asyh);
		if (ret)
			return ret;

		asyh->wndw.mask |= BIT(wndw->id);
	} else
	if (armw->visible) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
		harm->wndw.mask &= ~BIT(wndw->id);
	} else {
		return 0;
	}

	/* Aside from the obvious case where the window is actively being
	 * disabled, we might also need to temporarily disable the window
	 * when performing certain modeset operations.
	 */
	if (!asyw->visible || modeset) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		asyw->clr.xlut = armw->xlut.handle != 0;
		if (asyw->clr.xlut && asyw->visible)
			asyw->set.xlut = asyw->xlut.handle != 0;
		asyw->clr.csc  = armw->csc.valid;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle[0] != 0;
	}

	return 0;
}
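
/* Unpin the framebuffer backing object pinned by nv50_wndw_prepare_fb(). */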
static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nouveau_bo *nvbo;

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nvbo = nouveau_gem_object(old_state->fb->obj[0]);
	nouveau_bo_unpin(nvbo);
}
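
/* Pin the new framebuffer into VRAM, look up (or create) its DMA context
 * object, and record the exclusive fence and VRAM offset used for the flip.
 */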
static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct drm_framebuffer *fb = state->fb;
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nouveau_bo *nvbo;
	struct nv50_head_atom *asyh;
	struct nv50_wndw_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, fb);
	if (!asyw->state.fb)
		return 0;

	nvbo = nouveau_gem_object(fb->obj[0]);
	ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, true);
	if (ret)
		return ret;

	if (wndw->ctxdma.parent) {
		ctxdma = nv50_wndw_ctxdma_new(wndw, fb);
		if (IS_ERR(ctxdma)) {
			nouveau_bo_unpin(nvbo);
			return PTR_ERR(ctxdma);
		}

		if (asyw->visible)
			asyw->image.handle[0] = ctxdma->object.handle;
	}

	asyw->state.fence = dma_resv_get_excl_rcu(nvbo->bo.base.resv);
	asyw->image.offset[0] = nvbo->offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}

static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};

static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}

static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->ilut = NULL;
	asyw->xlut = armw->xlut;
	asyw->csc  = armw->csc;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}

static int
nv50_wndw_zpos_default(struct drm_plane *plane)
{
	return (plane->type == DRM_PLANE_TYPE_PRIMARY) ? 0 :
	       (plane->type == DRM_PLANE_TYPE_OVERLAY) ? 1 : 255;
}

static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);

	__drm_atomic_helper_plane_reset(plane, &asyw->state);
	plane->state->zpos = nv50_wndw_zpos_default(plane);
	plane->state->normalized_zpos = nv50_wndw_zpos_default(plane);
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &wndw->ctxdma.list, head) {
		nv50_wndw_ctxdma_del(ctxdma);
	}

	nvif_notify_dtor(&wndw->notify);
	nv50_dmac_destroy(&wndw->wimm);
	nv50_dmac_destroy(&wndw->wndw);

	nv50_lut_fini(&wndw->ilut);

	drm_plane_cleanup(&wndw->plane);
	kfree(wndw);
}

/* This function assumes the format has already been validated against the plane
 * and the modifier was validated against the device-wide modifier list at FB
 * creation time.
 */
static bool nv50_plane_format_mod_supported(struct drm_plane *plane,
					    u32 format, u64 modifier)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	uint8_t i;

	if (drm->client.device.info.chipset < 0xc0) {
		const struct drm_format_info *info = drm_format_info(format);
		const uint8_t kind = (modifier >> 12) & 0xff;

		if (!format) return false;

		for (i = 0; i < info->num_planes; i++)
			if ((info->cpp[i] != 4) && kind != 0x70) return false;
	}

	return true;
}

const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
	.format_mod_supported = nv50_plane_format_mod_supported,
};

static int
nv50_wndw_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}

void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}

void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}
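
/* Common window/plane constructor: registers the DRM plane and sets up the
 * input LUT and the zpos/alpha/blend-mode properties where supported.
 */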
static int
nv50_wndw_new_(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       const u32 *format, u32 heads,
	       enum nv50_disp_interlock_type interlock_type, u32 interlock_data,
	       struct nv50_wndw **pwndw)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_mmu *mmu = &drm->client.mmu;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_wndw *wndw;
	int nformat;
	int ret;

	if (!(wndw = *pwndw = kzalloc(sizeof(*wndw), GFP_KERNEL)))
		return -ENOMEM;
	wndw->func = func;
	wndw->id = index;
	wndw->interlock.type = interlock_type;
	wndw->interlock.data = interlock_data;

	wndw->ctxdma.parent = &wndw->wndw.base.user;
	INIT_LIST_HEAD(&wndw->ctxdma.list);

	for (nformat = 0; format[nformat]; nformat++);

	ret = drm_universal_plane_init(dev, &wndw->plane, heads, &nv50_wndw,
				       format, nformat,
				       nouveau_display(dev)->format_modifiers,
				       type, "%s-%d", name, index);
	if (ret) {
		kfree(*pwndw);
		*pwndw = NULL;
		return ret;
	}

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);

	if (wndw->func->ilut) {
		ret = nv50_lut_init(disp, mmu, &wndw->ilut);
		if (ret)
			return ret;
	}

	wndw->notify.func = nv50_wndw_notify;

	if (wndw->func->blend_set) {
		ret = drm_plane_create_zpos_property(&wndw->plane,
				nv50_wndw_zpos_default(&wndw->plane), 0, 254);
		if (ret)
			return ret;

		ret = drm_plane_create_alpha_property(&wndw->plane);
		if (ret)
			return ret;

		ret = drm_plane_create_blend_mode_property(&wndw->plane,
				BIT(DRM_MODE_BLEND_PIXEL_NONE) |
				BIT(DRM_MODE_BLEND_PREMULTI) |
				BIT(DRM_MODE_BLEND_COVERAGE));
		if (ret)
			return ret;
	} else {
		ret = drm_plane_create_zpos_immutable_property(&wndw->plane,
				nv50_wndw_zpos_default(&wndw->plane));
		if (ret)
			return ret;
	}

	return 0;
}
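
/* Probe the display for the newest supported window channel class, create a
 * window plane with the matching implementation, then set up its window
 * immediate channel.
 */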
int
nv50_wndw_new(struct nouveau_drm *drm, enum drm_plane_type type, int index,
	      struct nv50_wndw **pwndw)
{
	struct {
		s32 oclass;
		int version;
		int (*new)(struct nouveau_drm *, enum drm_plane_type,
			   int, s32, struct nv50_wndw **);
	} wndws[] = {
		{ GA102_DISP_WINDOW_CHANNEL_DMA, 0, wndwc67e_new },
		{ TU102_DISP_WINDOW_CHANNEL_DMA, 0, wndwc57e_new },
		{ GV100_DISP_WINDOW_CHANNEL_DMA, 0, wndwc37e_new },
		{}
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	int cid, ret;

	cid = nvif_mclass(&disp->disp->object, wndws);
	if (cid < 0) {
		NV_ERROR(drm, "No supported window class\n");
		return cid;
	}

	ret = wndws[cid].new(drm, type, index, wndws[cid].oclass, pwndw);
	if (ret)
		return ret;

	return nv50_wimm_init(drm, *pwndw);
}