1 // SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2015-2018 Etnaviv Project
 */
6 #include <linux/component.h>
7 #include <linux/dma-mapping.h>
8 #include <linux/module.h>
9 #include <linux/of_platform.h>
10 #include <linux/uaccess.h>
12 #include <drm/drm_debugfs.h>
13 #include <drm/drm_drv.h>
14 #include <drm/drm_file.h>
15 #include <drm/drm_ioctl.h>
16 #include <drm/drm_of.h>
17 #include <drm/drm_prime.h>
19 #include "etnaviv_cmdbuf.h"
20 #include "etnaviv_drv.h"
21 #include "etnaviv_gpu.h"
22 #include "etnaviv_gem.h"
23 #include "etnaviv_mmu.h"
24 #include "etnaviv_perfmon.h"
31 static void load_gpu(struct drm_device
*dev
)
33 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
36 for (i
= 0; i
< ETNA_MAX_PIPES
; i
++) {
37 struct etnaviv_gpu
*g
= priv
->gpu
[i
];
42 ret
= etnaviv_gpu_init(g
);
49 static int etnaviv_open(struct drm_device
*dev
, struct drm_file
*file
)
51 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
52 struct etnaviv_file_private
*ctx
;
55 ctx
= kzalloc(sizeof(*ctx
), GFP_KERNEL
);
59 ctx
->mmu
= etnaviv_iommu_context_init(priv
->mmu_global
,
60 priv
->cmdbuf_suballoc
);
66 for (i
= 0; i
< ETNA_MAX_PIPES
; i
++) {
67 struct etnaviv_gpu
*gpu
= priv
->gpu
[i
];
68 struct drm_gpu_scheduler
*sched
;
72 drm_sched_entity_init(&ctx
->sched_entity
[i
],
73 DRM_SCHED_PRIORITY_NORMAL
, &sched
,
78 file
->driver_priv
= ctx
;
87 static void etnaviv_postclose(struct drm_device
*dev
, struct drm_file
*file
)
89 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
90 struct etnaviv_file_private
*ctx
= file
->driver_priv
;
93 for (i
= 0; i
< ETNA_MAX_PIPES
; i
++) {
94 struct etnaviv_gpu
*gpu
= priv
->gpu
[i
];
97 drm_sched_entity_destroy(&ctx
->sched_entity
[i
]);
100 etnaviv_iommu_context_put(ctx
->mmu
);
109 #ifdef CONFIG_DEBUG_FS
110 static int etnaviv_gem_show(struct drm_device
*dev
, struct seq_file
*m
)
112 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
114 etnaviv_gem_describe_objects(priv
, m
);
119 static int etnaviv_mm_show(struct drm_device
*dev
, struct seq_file
*m
)
121 struct drm_printer p
= drm_seq_file_printer(m
);
123 read_lock(&dev
->vma_offset_manager
->vm_lock
);
124 drm_mm_print(&dev
->vma_offset_manager
->vm_addr_space_mm
, &p
);
125 read_unlock(&dev
->vma_offset_manager
->vm_lock
);
130 static int etnaviv_mmu_show(struct etnaviv_gpu
*gpu
, struct seq_file
*m
)
132 struct drm_printer p
= drm_seq_file_printer(m
);
133 struct etnaviv_iommu_context
*mmu_context
;
135 seq_printf(m
, "Active Objects (%s):\n", dev_name(gpu
->dev
));
138 * Lock the GPU to avoid a MMU context switch just now and elevate
139 * the refcount of the current context to avoid it disappearing from
142 mutex_lock(&gpu
->lock
);
143 mmu_context
= gpu
->mmu_context
;
145 etnaviv_iommu_context_get(mmu_context
);
146 mutex_unlock(&gpu
->lock
);
151 mutex_lock(&mmu_context
->lock
);
152 drm_mm_print(&mmu_context
->mm
, &p
);
153 mutex_unlock(&mmu_context
->lock
);
155 etnaviv_iommu_context_put(mmu_context
);
160 static void etnaviv_buffer_dump(struct etnaviv_gpu
*gpu
, struct seq_file
*m
)
162 struct etnaviv_cmdbuf
*buf
= &gpu
->buffer
;
163 u32 size
= buf
->size
;
164 u32
*ptr
= buf
->vaddr
;
167 seq_printf(m
, "virt %p - phys 0x%llx - free 0x%08x\n",
168 buf
->vaddr
, (u64
)etnaviv_cmdbuf_get_pa(buf
),
169 size
- buf
->user_size
);
171 for (i
= 0; i
< size
/ 4; i
++) {
175 seq_printf(m
, "\t0x%p: ", ptr
+ i
);
176 seq_printf(m
, "%08x ", *(ptr
+ i
));
181 static int etnaviv_ring_show(struct etnaviv_gpu
*gpu
, struct seq_file
*m
)
183 seq_printf(m
, "Ring Buffer (%s): ", dev_name(gpu
->dev
));
185 mutex_lock(&gpu
->lock
);
186 etnaviv_buffer_dump(gpu
, m
);
187 mutex_unlock(&gpu
->lock
);
192 static int show_unlocked(struct seq_file
*m
, void *arg
)
194 struct drm_info_node
*node
= (struct drm_info_node
*) m
->private;
195 struct drm_device
*dev
= node
->minor
->dev
;
196 int (*show
)(struct drm_device
*dev
, struct seq_file
*m
) =
197 node
->info_ent
->data
;
202 static int show_each_gpu(struct seq_file
*m
, void *arg
)
204 struct drm_info_node
*node
= (struct drm_info_node
*) m
->private;
205 struct drm_device
*dev
= node
->minor
->dev
;
206 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
207 struct etnaviv_gpu
*gpu
;
208 int (*show
)(struct etnaviv_gpu
*gpu
, struct seq_file
*m
) =
209 node
->info_ent
->data
;
213 for (i
= 0; i
< ETNA_MAX_PIPES
; i
++) {
226 static struct drm_info_list etnaviv_debugfs_list
[] = {
227 {"gpu", show_each_gpu
, 0, etnaviv_gpu_debugfs
},
228 {"gem", show_unlocked
, 0, etnaviv_gem_show
},
229 { "mm", show_unlocked
, 0, etnaviv_mm_show
},
230 {"mmu", show_each_gpu
, 0, etnaviv_mmu_show
},
231 {"ring", show_each_gpu
, 0, etnaviv_ring_show
},
234 static void etnaviv_debugfs_init(struct drm_minor
*minor
)
236 drm_debugfs_create_files(etnaviv_debugfs_list
,
237 ARRAY_SIZE(etnaviv_debugfs_list
),
238 minor
->debugfs_root
, minor
);
246 static int etnaviv_ioctl_get_param(struct drm_device
*dev
, void *data
,
247 struct drm_file
*file
)
249 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
250 struct drm_etnaviv_param
*args
= data
;
251 struct etnaviv_gpu
*gpu
;
253 if (args
->pipe
>= ETNA_MAX_PIPES
)
256 gpu
= priv
->gpu
[args
->pipe
];
260 return etnaviv_gpu_get_param(gpu
, args
->param
, &args
->value
);
263 static int etnaviv_ioctl_gem_new(struct drm_device
*dev
, void *data
,
264 struct drm_file
*file
)
266 struct drm_etnaviv_gem_new
*args
= data
;
268 if (args
->flags
& ~(ETNA_BO_CACHED
| ETNA_BO_WC
| ETNA_BO_UNCACHED
|
272 return etnaviv_gem_new_handle(dev
, file
, args
->size
,
273 args
->flags
, &args
->handle
);
276 static int etnaviv_ioctl_gem_cpu_prep(struct drm_device
*dev
, void *data
,
277 struct drm_file
*file
)
279 struct drm_etnaviv_gem_cpu_prep
*args
= data
;
280 struct drm_gem_object
*obj
;
283 if (args
->op
& ~(ETNA_PREP_READ
| ETNA_PREP_WRITE
| ETNA_PREP_NOSYNC
))
286 obj
= drm_gem_object_lookup(file
, args
->handle
);
290 ret
= etnaviv_gem_cpu_prep(obj
, args
->op
, &args
->timeout
);
292 drm_gem_object_put(obj
);
297 static int etnaviv_ioctl_gem_cpu_fini(struct drm_device
*dev
, void *data
,
298 struct drm_file
*file
)
300 struct drm_etnaviv_gem_cpu_fini
*args
= data
;
301 struct drm_gem_object
*obj
;
307 obj
= drm_gem_object_lookup(file
, args
->handle
);
311 ret
= etnaviv_gem_cpu_fini(obj
);
313 drm_gem_object_put(obj
);
318 static int etnaviv_ioctl_gem_info(struct drm_device
*dev
, void *data
,
319 struct drm_file
*file
)
321 struct drm_etnaviv_gem_info
*args
= data
;
322 struct drm_gem_object
*obj
;
328 obj
= drm_gem_object_lookup(file
, args
->handle
);
332 ret
= etnaviv_gem_mmap_offset(obj
, &args
->offset
);
333 drm_gem_object_put(obj
);
338 static int etnaviv_ioctl_wait_fence(struct drm_device
*dev
, void *data
,
339 struct drm_file
*file
)
341 struct drm_etnaviv_wait_fence
*args
= data
;
342 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
343 struct drm_etnaviv_timespec
*timeout
= &args
->timeout
;
344 struct etnaviv_gpu
*gpu
;
346 if (args
->flags
& ~(ETNA_WAIT_NONBLOCK
))
349 if (args
->pipe
>= ETNA_MAX_PIPES
)
352 gpu
= priv
->gpu
[args
->pipe
];
356 if (args
->flags
& ETNA_WAIT_NONBLOCK
)
359 return etnaviv_gpu_wait_fence_interruptible(gpu
, args
->fence
,
363 static int etnaviv_ioctl_gem_userptr(struct drm_device
*dev
, void *data
,
364 struct drm_file
*file
)
366 struct drm_etnaviv_gem_userptr
*args
= data
;
368 if (args
->flags
& ~(ETNA_USERPTR_READ
|ETNA_USERPTR_WRITE
) ||
372 if (offset_in_page(args
->user_ptr
| args
->user_size
) ||
373 (uintptr_t)args
->user_ptr
!= args
->user_ptr
||
374 (u32
)args
->user_size
!= args
->user_size
||
375 args
->user_ptr
& ~PAGE_MASK
)
378 if (!access_ok((void __user
*)(unsigned long)args
->user_ptr
,
382 return etnaviv_gem_new_userptr(dev
, file
, args
->user_ptr
,
383 args
->user_size
, args
->flags
,
387 static int etnaviv_ioctl_gem_wait(struct drm_device
*dev
, void *data
,
388 struct drm_file
*file
)
390 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
391 struct drm_etnaviv_gem_wait
*args
= data
;
392 struct drm_etnaviv_timespec
*timeout
= &args
->timeout
;
393 struct drm_gem_object
*obj
;
394 struct etnaviv_gpu
*gpu
;
397 if (args
->flags
& ~(ETNA_WAIT_NONBLOCK
))
400 if (args
->pipe
>= ETNA_MAX_PIPES
)
403 gpu
= priv
->gpu
[args
->pipe
];
407 obj
= drm_gem_object_lookup(file
, args
->handle
);
411 if (args
->flags
& ETNA_WAIT_NONBLOCK
)
414 ret
= etnaviv_gem_wait_bo(gpu
, obj
, timeout
);
416 drm_gem_object_put(obj
);
421 static int etnaviv_ioctl_pm_query_dom(struct drm_device
*dev
, void *data
,
422 struct drm_file
*file
)
424 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
425 struct drm_etnaviv_pm_domain
*args
= data
;
426 struct etnaviv_gpu
*gpu
;
428 if (args
->pipe
>= ETNA_MAX_PIPES
)
431 gpu
= priv
->gpu
[args
->pipe
];
435 return etnaviv_pm_query_dom(gpu
, args
);
438 static int etnaviv_ioctl_pm_query_sig(struct drm_device
*dev
, void *data
,
439 struct drm_file
*file
)
441 struct etnaviv_drm_private
*priv
= dev
->dev_private
;
442 struct drm_etnaviv_pm_signal
*args
= data
;
443 struct etnaviv_gpu
*gpu
;
445 if (args
->pipe
>= ETNA_MAX_PIPES
)
448 gpu
= priv
->gpu
[args
->pipe
];
452 return etnaviv_pm_query_sig(gpu
, args
);
455 static const struct drm_ioctl_desc etnaviv_ioctls
[] = {
456 #define ETNA_IOCTL(n, func, flags) \
457 DRM_IOCTL_DEF_DRV(ETNAVIV_##n, etnaviv_ioctl_##func, flags)
458 ETNA_IOCTL(GET_PARAM
, get_param
, DRM_RENDER_ALLOW
),
459 ETNA_IOCTL(GEM_NEW
, gem_new
, DRM_RENDER_ALLOW
),
460 ETNA_IOCTL(GEM_INFO
, gem_info
, DRM_RENDER_ALLOW
),
461 ETNA_IOCTL(GEM_CPU_PREP
, gem_cpu_prep
, DRM_RENDER_ALLOW
),
462 ETNA_IOCTL(GEM_CPU_FINI
, gem_cpu_fini
, DRM_RENDER_ALLOW
),
463 ETNA_IOCTL(GEM_SUBMIT
, gem_submit
, DRM_RENDER_ALLOW
),
464 ETNA_IOCTL(WAIT_FENCE
, wait_fence
, DRM_RENDER_ALLOW
),
465 ETNA_IOCTL(GEM_USERPTR
, gem_userptr
, DRM_RENDER_ALLOW
),
466 ETNA_IOCTL(GEM_WAIT
, gem_wait
, DRM_RENDER_ALLOW
),
467 ETNA_IOCTL(PM_QUERY_DOM
, pm_query_dom
, DRM_RENDER_ALLOW
),
468 ETNA_IOCTL(PM_QUERY_SIG
, pm_query_sig
, DRM_RENDER_ALLOW
),
471 static const struct file_operations fops
= {
472 .owner
= THIS_MODULE
,
474 .release
= drm_release
,
475 .unlocked_ioctl
= drm_ioctl
,
476 .compat_ioctl
= drm_compat_ioctl
,
480 .mmap
= etnaviv_gem_mmap
,
483 static const struct drm_driver etnaviv_drm_driver
= {
484 .driver_features
= DRIVER_GEM
| DRIVER_RENDER
,
485 .open
= etnaviv_open
,
486 .postclose
= etnaviv_postclose
,
487 .prime_handle_to_fd
= drm_gem_prime_handle_to_fd
,
488 .prime_fd_to_handle
= drm_gem_prime_fd_to_handle
,
489 .gem_prime_import_sg_table
= etnaviv_gem_prime_import_sg_table
,
490 .gem_prime_mmap
= etnaviv_gem_prime_mmap
,
491 #ifdef CONFIG_DEBUG_FS
492 .debugfs_init
= etnaviv_debugfs_init
,
494 .ioctls
= etnaviv_ioctls
,
495 .num_ioctls
= DRM_ETNAVIV_NUM_IOCTLS
,
498 .desc
= "etnaviv DRM",
507 static int etnaviv_bind(struct device
*dev
)
509 struct etnaviv_drm_private
*priv
;
510 struct drm_device
*drm
;
513 drm
= drm_dev_alloc(&etnaviv_drm_driver
, dev
);
517 priv
= kzalloc(sizeof(*priv
), GFP_KERNEL
);
519 dev_err(dev
, "failed to allocate private data\n");
523 drm
->dev_private
= priv
;
525 dma_set_max_seg_size(dev
, SZ_2G
);
527 mutex_init(&priv
->gem_lock
);
528 INIT_LIST_HEAD(&priv
->gem_list
);
530 priv
->shm_gfp_mask
= GFP_HIGHUSER
| __GFP_RETRY_MAYFAIL
| __GFP_NOWARN
;
532 priv
->cmdbuf_suballoc
= etnaviv_cmdbuf_suballoc_new(drm
->dev
);
533 if (IS_ERR(priv
->cmdbuf_suballoc
)) {
534 dev_err(drm
->dev
, "Failed to create cmdbuf suballocator\n");
535 ret
= PTR_ERR(priv
->cmdbuf_suballoc
);
539 dev_set_drvdata(dev
, drm
);
541 ret
= component_bind_all(dev
, drm
);
543 goto out_destroy_suballoc
;
547 ret
= drm_dev_register(drm
, 0);
554 component_unbind_all(dev
, drm
);
555 out_destroy_suballoc
:
556 etnaviv_cmdbuf_suballoc_destroy(priv
->cmdbuf_suballoc
);
565 static void etnaviv_unbind(struct device
*dev
)
567 struct drm_device
*drm
= dev_get_drvdata(dev
);
568 struct etnaviv_drm_private
*priv
= drm
->dev_private
;
570 drm_dev_unregister(drm
);
572 component_unbind_all(dev
, drm
);
574 etnaviv_cmdbuf_suballoc_destroy(priv
->cmdbuf_suballoc
);
576 drm
->dev_private
= NULL
;
582 static const struct component_master_ops etnaviv_master_ops
= {
583 .bind
= etnaviv_bind
,
584 .unbind
= etnaviv_unbind
,
587 static int compare_of(struct device
*dev
, void *data
)
589 struct device_node
*np
= data
;
591 return dev
->of_node
== np
;
/* Component match: true when the device name equals the given string. */
static int compare_str(struct device *dev, void *data)
{
	return !strcmp(dev_name(dev), data);
}
599 static int etnaviv_pdev_probe(struct platform_device
*pdev
)
601 struct device
*dev
= &pdev
->dev
;
602 struct component_match
*match
= NULL
;
604 if (!dev
->platform_data
) {
605 struct device_node
*core_node
;
607 for_each_compatible_node(core_node
, NULL
, "vivante,gc") {
608 if (!of_device_is_available(core_node
))
611 drm_of_component_match_add(&pdev
->dev
, &match
,
612 compare_of
, core_node
);
615 char **names
= dev
->platform_data
;
618 for (i
= 0; names
[i
]; i
++)
619 component_match_add(dev
, &match
, compare_str
, names
[i
]);
622 return component_master_add_with_match(dev
, &etnaviv_master_ops
, match
);
625 static int etnaviv_pdev_remove(struct platform_device
*pdev
)
627 component_master_del(&pdev
->dev
, &etnaviv_master_ops
);
632 static struct platform_driver etnaviv_platform_driver
= {
633 .probe
= etnaviv_pdev_probe
,
634 .remove
= etnaviv_pdev_remove
,
640 static struct platform_device
*etnaviv_drm
;
642 static int __init
etnaviv_init(void)
644 struct platform_device
*pdev
;
646 struct device_node
*np
;
648 etnaviv_validate_init();
650 ret
= platform_driver_register(&etnaviv_gpu_driver
);
654 ret
= platform_driver_register(&etnaviv_platform_driver
);
656 goto unregister_gpu_driver
;
659 * If the DT contains at least one available GPU device, instantiate
660 * the DRM platform device.
662 for_each_compatible_node(np
, NULL
, "vivante,gc") {
663 if (!of_device_is_available(np
))
666 pdev
= platform_device_alloc("etnaviv", -1);
670 goto unregister_platform_driver
;
672 pdev
->dev
.coherent_dma_mask
= DMA_BIT_MASK(40);
673 pdev
->dev
.dma_mask
= &pdev
->dev
.coherent_dma_mask
;
676 * Apply the same DMA configuration to the virtual etnaviv
677 * device as the GPU we found. This assumes that all Vivante
678 * GPUs in the system share the same DMA constraints.
680 of_dma_configure(&pdev
->dev
, np
, true);
682 ret
= platform_device_add(pdev
);
684 platform_device_put(pdev
);
686 goto unregister_platform_driver
;
696 unregister_platform_driver
:
697 platform_driver_unregister(&etnaviv_platform_driver
);
698 unregister_gpu_driver
:
699 platform_driver_unregister(&etnaviv_gpu_driver
);
702 module_init(etnaviv_init
);
704 static void __exit
etnaviv_exit(void)
706 platform_device_unregister(etnaviv_drm
);
707 platform_driver_unregister(&etnaviv_platform_driver
);
708 platform_driver_unregister(&etnaviv_gpu_driver
);
710 module_exit(etnaviv_exit
);
712 MODULE_AUTHOR("Christian Gmeiner <christian.gmeiner@gmail.com>");
713 MODULE_AUTHOR("Russell King <rmk+kernel@armlinux.org.uk>");
714 MODULE_AUTHOR("Lucas Stach <l.stach@pengutronix.de>");
715 MODULE_DESCRIPTION("etnaviv DRM Driver");
716 MODULE_LICENSE("GPL v2");
717 MODULE_ALIAS("platform:etnaviv");