Linux 4.19.133
[linux/fpc-iii.git] / drivers/gpu/drm/etnaviv/etnaviv_drv.c
blob 00675fcbffa2d9b0f2bcbe68ffe9c85e434f68d2
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2015-2018 Etnaviv Project
 */

#include <linux/component.h>
#include <linux/of_platform.h>
#include <drm/drm_of.h>

#include "etnaviv_cmdbuf.h"
#include "etnaviv_drv.h"
#include "etnaviv_gpu.h"
#include "etnaviv_gem.h"
#include "etnaviv_mmu.h"
#include "etnaviv_perfmon.h"
/*
 * DRM operations:
 */
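/*
 * Bring up every GPU core that was bound to the master device; cores whose
 * etnaviv_gpu_init() fails are dropped from the pipe array, so later ioctls
 * treat those pipes as absent.
 */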
static void load_gpu(struct drm_device *dev)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	unsigned int i;

	for (i = 0; i < ETNA_MAX_PIPES; i++) {
		struct etnaviv_gpu *g = priv->gpu[i];

		if (g) {
			int ret;

			ret = etnaviv_gpu_init(g);
			if (ret)
				priv->gpu[i] = NULL;
		}
	}
}
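/*
 * Per-file open: allocate an etnaviv_file_private and create one scheduler
 * entity per present GPU pipe on the NORMAL run queue.
 */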
static int etnaviv_open(struct drm_device *dev, struct drm_file *file)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct etnaviv_file_private *ctx;
	int i;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	for (i = 0; i < ETNA_MAX_PIPES; i++) {
		struct etnaviv_gpu *gpu = priv->gpu[i];
		struct drm_sched_rq *rq;

		if (gpu) {
			rq = &gpu->sched.sched_rq[DRM_SCHED_PRIORITY_NORMAL];
			drm_sched_entity_init(&ctx->sched_entity[i],
					      &rq, 1, NULL);
		}
	}

	file->driver_priv = ctx;

	return 0;
}
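/*
 * Per-file close: drop any lastctx reference held by the GPUs and tear down
 * the per-pipe scheduler entities before freeing the context.
 */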
static void etnaviv_postclose(struct drm_device *dev, struct drm_file *file)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct etnaviv_file_private *ctx = file->driver_priv;
	unsigned int i;

	for (i = 0; i < ETNA_MAX_PIPES; i++) {
		struct etnaviv_gpu *gpu = priv->gpu[i];

		if (gpu) {
			mutex_lock(&gpu->lock);
			if (gpu->lastctx == ctx)
				gpu->lastctx = NULL;
			mutex_unlock(&gpu->lock);

			drm_sched_entity_destroy(&ctx->sched_entity[i]);
		}
	}

	kfree(ctx);
}

/*
 * DRM debugfs:
 */
#ifdef CONFIG_DEBUG_FS
static int etnaviv_gem_show(struct drm_device *dev, struct seq_file *m)
{
	struct etnaviv_drm_private *priv = dev->dev_private;

	etnaviv_gem_describe_objects(priv, m);

	return 0;
}

static int etnaviv_mm_show(struct drm_device *dev, struct seq_file *m)
{
	struct drm_printer p = drm_seq_file_printer(m);

	read_lock(&dev->vma_offset_manager->vm_lock);
	drm_mm_print(&dev->vma_offset_manager->vm_addr_space_mm, &p);
	read_unlock(&dev->vma_offset_manager->vm_lock);

	return 0;
}

static int etnaviv_mmu_show(struct etnaviv_gpu *gpu, struct seq_file *m)
{
	struct drm_printer p = drm_seq_file_printer(m);

	seq_printf(m, "Active Objects (%s):\n", dev_name(gpu->dev));

	mutex_lock(&gpu->mmu->lock);
	drm_mm_print(&gpu->mmu->mm, &p);
	mutex_unlock(&gpu->mmu->lock);

	return 0;
}

static void etnaviv_buffer_dump(struct etnaviv_gpu *gpu, struct seq_file *m)
{
	struct etnaviv_cmdbuf *buf = &gpu->buffer;
	u32 size = buf->size;
	u32 *ptr = buf->vaddr;
	u32 i;

	seq_printf(m, "virt %p - phys 0x%llx - free 0x%08x\n",
			buf->vaddr, (u64)etnaviv_cmdbuf_get_pa(buf),
			size - buf->user_size);

	for (i = 0; i < size / 4; i++) {
		if (i && !(i % 4))
			seq_puts(m, "\n");
		if (i % 4 == 0)
			seq_printf(m, "\t0x%p: ", ptr + i);
		seq_printf(m, "%08x ", *(ptr + i));
	}
	seq_puts(m, "\n");
}

static int etnaviv_ring_show(struct etnaviv_gpu *gpu, struct seq_file *m)
{
	seq_printf(m, "Ring Buffer (%s): ", dev_name(gpu->dev));

	mutex_lock(&gpu->lock);
	etnaviv_buffer_dump(gpu, m);
	mutex_unlock(&gpu->lock);

	return 0;
}
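/*
 * debugfs dispatch helpers: show_unlocked() calls the per-device show
 * function stored in the drm_info_list entry, while show_each_gpu()
 * iterates over all present pipes and calls a per-GPU show function.
 */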
static int show_unlocked(struct seq_file *m, void *arg)
{
	struct drm_info_node *node = (struct drm_info_node *) m->private;
	struct drm_device *dev = node->minor->dev;
	int (*show)(struct drm_device *dev, struct seq_file *m) =
			node->info_ent->data;

	return show(dev, m);
}

static int show_each_gpu(struct seq_file *m, void *arg)
{
	struct drm_info_node *node = (struct drm_info_node *) m->private;
	struct drm_device *dev = node->minor->dev;
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct etnaviv_gpu *gpu;
	int (*show)(struct etnaviv_gpu *gpu, struct seq_file *m) =
			node->info_ent->data;
	unsigned int i;
	int ret = 0;

	for (i = 0; i < ETNA_MAX_PIPES; i++) {
		gpu = priv->gpu[i];
		if (!gpu)
			continue;

		ret = show(gpu, m);
		if (ret < 0)
			break;
	}

	return ret;
}

static struct drm_info_list etnaviv_debugfs_list[] = {
	{"gpu", show_each_gpu, 0, etnaviv_gpu_debugfs},
	{"gem", show_unlocked, 0, etnaviv_gem_show},
	{ "mm", show_unlocked, 0, etnaviv_mm_show },
	{"mmu", show_each_gpu, 0, etnaviv_mmu_show},
	{"ring", show_each_gpu, 0, etnaviv_ring_show},
};

static int etnaviv_debugfs_init(struct drm_minor *minor)
{
	struct drm_device *dev = minor->dev;
	int ret;

	ret = drm_debugfs_create_files(etnaviv_debugfs_list,
			ARRAY_SIZE(etnaviv_debugfs_list),
			minor->debugfs_root, minor);

	if (ret) {
		dev_err(dev->dev, "could not install etnaviv_debugfs_list\n");
		return ret;
	}

	return ret;
}
#endif
/*
 * DRM ioctls:
 */
static int etnaviv_ioctl_get_param(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct drm_etnaviv_param *args = data;
	struct etnaviv_gpu *gpu;

	if (args->pipe >= ETNA_MAX_PIPES)
		return -EINVAL;

	gpu = priv->gpu[args->pipe];
	if (!gpu)
		return -ENXIO;

	return etnaviv_gpu_get_param(gpu, args->param, &args->value);
}

static int etnaviv_ioctl_gem_new(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct drm_etnaviv_gem_new *args = data;

	if (args->flags & ~(ETNA_BO_CACHED | ETNA_BO_WC | ETNA_BO_UNCACHED |
			    ETNA_BO_FORCE_MMU))
		return -EINVAL;

	return etnaviv_gem_new_handle(dev, file, args->size,
			args->flags, &args->handle);
}
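/*
 * TS() copies the tv_sec/tv_nsec members of the UAPI timeout argument into
 * a kernel struct timespec so its address can be passed to the wait helpers.
 */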
#define TS(t) ((struct timespec){ \
	.tv_sec = (t).tv_sec, \
	.tv_nsec = (t).tv_nsec \
})

static int etnaviv_ioctl_gem_cpu_prep(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct drm_etnaviv_gem_cpu_prep *args = data;
	struct drm_gem_object *obj;
	int ret;

	if (args->op & ~(ETNA_PREP_READ | ETNA_PREP_WRITE | ETNA_PREP_NOSYNC))
		return -EINVAL;

	obj = drm_gem_object_lookup(file, args->handle);
	if (!obj)
		return -ENOENT;

	ret = etnaviv_gem_cpu_prep(obj, args->op, &TS(args->timeout));

	drm_gem_object_put_unlocked(obj);

	return ret;
}
static int etnaviv_ioctl_gem_cpu_fini(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct drm_etnaviv_gem_cpu_fini *args = data;
	struct drm_gem_object *obj;
	int ret;

	if (args->flags)
		return -EINVAL;

	obj = drm_gem_object_lookup(file, args->handle);
	if (!obj)
		return -ENOENT;

	ret = etnaviv_gem_cpu_fini(obj);

	drm_gem_object_put_unlocked(obj);

	return ret;
}

static int etnaviv_ioctl_gem_info(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct drm_etnaviv_gem_info *args = data;
	struct drm_gem_object *obj;
	int ret;

	if (args->pad)
		return -EINVAL;

	obj = drm_gem_object_lookup(file, args->handle);
	if (!obj)
		return -ENOENT;

	ret = etnaviv_gem_mmap_offset(obj, &args->offset);
	drm_gem_object_put_unlocked(obj);

	return ret;
}
static int etnaviv_ioctl_wait_fence(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct drm_etnaviv_wait_fence *args = data;
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct timespec *timeout = &TS(args->timeout);
	struct etnaviv_gpu *gpu;

	if (args->flags & ~(ETNA_WAIT_NONBLOCK))
		return -EINVAL;

	if (args->pipe >= ETNA_MAX_PIPES)
		return -EINVAL;

	gpu = priv->gpu[args->pipe];
	if (!gpu)
		return -ENXIO;

	if (args->flags & ETNA_WAIT_NONBLOCK)
		timeout = NULL;

	return etnaviv_gpu_wait_fence_interruptible(gpu, args->fence,
						    timeout);
}
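/*
 * GEM_USERPTR: wrap an existing user allocation in a GEM object. The user
 * pointer and size must be page aligned and representable, and the range
 * must pass access_ok() for the requested access mode.
 */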
static int etnaviv_ioctl_gem_userptr(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct drm_etnaviv_gem_userptr *args = data;
	int access;

	if (args->flags & ~(ETNA_USERPTR_READ|ETNA_USERPTR_WRITE) ||
	    args->flags == 0)
		return -EINVAL;

	if (offset_in_page(args->user_ptr | args->user_size) ||
	    (uintptr_t)args->user_ptr != args->user_ptr ||
	    (u32)args->user_size != args->user_size ||
	    args->user_ptr & ~PAGE_MASK)
		return -EINVAL;

	if (args->flags & ETNA_USERPTR_WRITE)
		access = VERIFY_WRITE;
	else
		access = VERIFY_READ;

	if (!access_ok(access, (void __user *)(unsigned long)args->user_ptr,
		       args->user_size))
		return -EFAULT;

	return etnaviv_gem_new_userptr(dev, file, args->user_ptr,
				       args->user_size, args->flags,
				       &args->handle);
}

static int etnaviv_ioctl_gem_wait(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct drm_etnaviv_gem_wait *args = data;
	struct timespec *timeout = &TS(args->timeout);
	struct drm_gem_object *obj;
	struct etnaviv_gpu *gpu;
	int ret;

	if (args->flags & ~(ETNA_WAIT_NONBLOCK))
		return -EINVAL;

	if (args->pipe >= ETNA_MAX_PIPES)
		return -EINVAL;

	gpu = priv->gpu[args->pipe];
	if (!gpu)
		return -ENXIO;

	obj = drm_gem_object_lookup(file, args->handle);
	if (!obj)
		return -ENOENT;

	if (args->flags & ETNA_WAIT_NONBLOCK)
		timeout = NULL;

	ret = etnaviv_gem_wait_bo(gpu, obj, timeout);

	drm_gem_object_put_unlocked(obj);

	return ret;
}
static int etnaviv_ioctl_pm_query_dom(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct drm_etnaviv_pm_domain *args = data;
	struct etnaviv_gpu *gpu;

	if (args->pipe >= ETNA_MAX_PIPES)
		return -EINVAL;

	gpu = priv->gpu[args->pipe];
	if (!gpu)
		return -ENXIO;

	return etnaviv_pm_query_dom(gpu, args);
}

static int etnaviv_ioctl_pm_query_sig(struct drm_device *dev, void *data,
		struct drm_file *file)
{
	struct etnaviv_drm_private *priv = dev->dev_private;
	struct drm_etnaviv_pm_signal *args = data;
	struct etnaviv_gpu *gpu;

	if (args->pipe >= ETNA_MAX_PIPES)
		return -EINVAL;

	gpu = priv->gpu[args->pipe];
	if (!gpu)
		return -ENXIO;

	return etnaviv_pm_query_sig(gpu, args);
}
static const struct drm_ioctl_desc etnaviv_ioctls[] = {
#define ETNA_IOCTL(n, func, flags) \
	DRM_IOCTL_DEF_DRV(ETNAVIV_##n, etnaviv_ioctl_##func, flags)
	ETNA_IOCTL(GET_PARAM,    get_param,    DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_NEW,      gem_new,      DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_INFO,     gem_info,     DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_CPU_PREP, gem_cpu_prep, DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_CPU_FINI, gem_cpu_fini, DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_SUBMIT,   gem_submit,   DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(WAIT_FENCE,   wait_fence,   DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_USERPTR,  gem_userptr,  DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(GEM_WAIT,     gem_wait,     DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(PM_QUERY_DOM, pm_query_dom, DRM_AUTH|DRM_RENDER_ALLOW),
	ETNA_IOCTL(PM_QUERY_SIG, pm_query_sig, DRM_AUTH|DRM_RENDER_ALLOW),
};
static const struct vm_operations_struct vm_ops = {
	.fault = etnaviv_gem_fault,
	.open = drm_gem_vm_open,
	.close = drm_gem_vm_close,
};

static const struct file_operations fops = {
	.owner = THIS_MODULE,
	.open = drm_open,
	.release = drm_release,
	.unlocked_ioctl = drm_ioctl,
	.compat_ioctl = drm_compat_ioctl,
	.poll = drm_poll,
	.read = drm_read,
	.llseek = no_llseek,
	.mmap = etnaviv_gem_mmap,
};

static struct drm_driver etnaviv_drm_driver = {
	.driver_features    = DRIVER_GEM |
				DRIVER_PRIME |
				DRIVER_RENDER,
	.open               = etnaviv_open,
	.postclose          = etnaviv_postclose,
	.gem_free_object_unlocked = etnaviv_gem_free_object,
	.gem_vm_ops         = &vm_ops,
	.prime_handle_to_fd = drm_gem_prime_handle_to_fd,
	.prime_fd_to_handle = drm_gem_prime_fd_to_handle,
	.gem_prime_export   = drm_gem_prime_export,
	.gem_prime_import   = drm_gem_prime_import,
	.gem_prime_res_obj  = etnaviv_gem_prime_res_obj,
	.gem_prime_pin      = etnaviv_gem_prime_pin,
	.gem_prime_unpin    = etnaviv_gem_prime_unpin,
	.gem_prime_get_sg_table = etnaviv_gem_prime_get_sg_table,
	.gem_prime_import_sg_table = etnaviv_gem_prime_import_sg_table,
	.gem_prime_vmap     = etnaviv_gem_prime_vmap,
	.gem_prime_vunmap   = etnaviv_gem_prime_vunmap,
	.gem_prime_mmap     = etnaviv_gem_prime_mmap,
#ifdef CONFIG_DEBUG_FS
	.debugfs_init       = etnaviv_debugfs_init,
#endif
	.ioctls             = etnaviv_ioctls,
	.num_ioctls         = DRM_ETNAVIV_NUM_IOCTLS,
	.fops               = &fops,
	.name               = "etnaviv",
	.desc               = "etnaviv DRM",
	.date               = "20151214",
	.major              = 1,
	.minor              = 2,
};
/*
 * Platform driver:
 */
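/*
 * Component master bind: allocate the drm_device and driver-private data,
 * bind all GPU core components, initialize them and register the DRM device.
 */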
static int etnaviv_bind(struct device *dev)
{
	struct etnaviv_drm_private *priv;
	struct drm_device *drm;
	int ret;

	drm = drm_dev_alloc(&etnaviv_drm_driver, dev);
	if (IS_ERR(drm))
		return PTR_ERR(drm);

	priv = kzalloc(sizeof(*priv), GFP_KERNEL);
	if (!priv) {
		dev_err(dev, "failed to allocate private data\n");
		ret = -ENOMEM;
		goto out_unref;
	}
	drm->dev_private = priv;

	dev->dma_parms = &priv->dma_parms;
	dma_set_max_seg_size(dev, SZ_2G);

	mutex_init(&priv->gem_lock);
	INIT_LIST_HEAD(&priv->gem_list);
	priv->num_gpus = 0;

	dev_set_drvdata(dev, drm);

	ret = component_bind_all(dev, drm);
	if (ret < 0)
		goto out_bind;

	load_gpu(drm);

	ret = drm_dev_register(drm, 0);
	if (ret)
		goto out_register;

	return 0;

out_register:
	component_unbind_all(dev, drm);
out_bind:
	kfree(priv);
out_unref:
	drm_dev_unref(drm);

	return ret;
}

static void etnaviv_unbind(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct etnaviv_drm_private *priv = drm->dev_private;

	drm_dev_unregister(drm);

	component_unbind_all(dev, drm);

	dev->dma_parms = NULL;

	drm->dev_private = NULL;
	kfree(priv);

	drm_dev_unref(drm);
}
static const struct component_master_ops etnaviv_master_ops = {
	.bind = etnaviv_bind,
	.unbind = etnaviv_unbind,
};

static int compare_of(struct device *dev, void *data)
{
	struct device_node *np = data;

	return dev->of_node == np;
}

static int compare_str(struct device *dev, void *data)
{
	return !strcmp(dev_name(dev), data);
}

static int etnaviv_pdev_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct component_match *match = NULL;

	if (!dev->platform_data) {
		struct device_node *core_node;

		for_each_compatible_node(core_node, NULL, "vivante,gc") {
			if (!of_device_is_available(core_node))
				continue;

			drm_of_component_match_add(&pdev->dev, &match,
						   compare_of, core_node);
		}
	} else {
		char **names = dev->platform_data;
		unsigned i;

		for (i = 0; names[i]; i++)
			component_match_add(dev, &match, compare_str, names[i]);
	}

	return component_master_add_with_match(dev, &etnaviv_master_ops, match);
}
static int etnaviv_pdev_remove(struct platform_device *pdev)
{
	component_master_del(&pdev->dev, &etnaviv_master_ops);

	return 0;
}

static struct platform_driver etnaviv_platform_driver = {
	.probe = etnaviv_pdev_probe,
	.remove = etnaviv_pdev_remove,
	.driver = {
		.name = "etnaviv",
	},
};

static struct platform_device *etnaviv_drm;
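/*
 * Module init: register the GPU core driver and the etnaviv platform driver,
 * then create the virtual "etnaviv" platform device if the DT contains at
 * least one available Vivante GPU node.
 */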
static int __init etnaviv_init(void)
{
	struct platform_device *pdev;
	int ret;
	struct device_node *np;

	etnaviv_validate_init();

	ret = platform_driver_register(&etnaviv_gpu_driver);
	if (ret != 0)
		return ret;

	ret = platform_driver_register(&etnaviv_platform_driver);
	if (ret != 0)
		goto unregister_gpu_driver;

	/*
	 * If the DT contains at least one available GPU device, instantiate
	 * the DRM platform device.
	 */
	for_each_compatible_node(np, NULL, "vivante,gc") {
		if (!of_device_is_available(np))
			continue;

		pdev = platform_device_alloc("etnaviv", -1);
		if (!pdev) {
			ret = -ENOMEM;
			of_node_put(np);
			goto unregister_platform_driver;
		}
		pdev->dev.coherent_dma_mask = DMA_BIT_MASK(40);
		pdev->dev.dma_mask = &pdev->dev.coherent_dma_mask;

		/*
		 * Apply the same DMA configuration to the virtual etnaviv
		 * device as the GPU we found. This assumes that all Vivante
		 * GPUs in the system share the same DMA constraints.
		 */
		of_dma_configure(&pdev->dev, np, true);

		ret = platform_device_add(pdev);
		if (ret) {
			platform_device_put(pdev);
			of_node_put(np);
			goto unregister_platform_driver;
		}

		etnaviv_drm = pdev;
		of_node_put(np);
		break;
	}

	return 0;

unregister_platform_driver:
	platform_driver_unregister(&etnaviv_platform_driver);
unregister_gpu_driver:
	platform_driver_unregister(&etnaviv_gpu_driver);
	return ret;
}
module_init(etnaviv_init);

static void __exit etnaviv_exit(void)
{
	platform_device_unregister(etnaviv_drm);
	platform_driver_unregister(&etnaviv_platform_driver);
	platform_driver_unregister(&etnaviv_gpu_driver);
}
module_exit(etnaviv_exit);
708 MODULE_AUTHOR("Christian Gmeiner <christian.gmeiner@gmail.com>");
709 MODULE_AUTHOR("Russell King <rmk+kernel@arm.linux.org.uk>");
710 MODULE_AUTHOR("Lucas Stach <l.stach@pengutronix.de>");
711 MODULE_DESCRIPTION("etnaviv DRM Driver");
712 MODULE_LICENSE("GPL v2");
713 MODULE_ALIAS("platform:etnaviv");