/* drivers/gpu/drm/etnaviv/etnaviv_drv.c */
/*
 * Copyright (C) 2015 Etnaviv Project
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <linux/component.h>
#include <linux/of_platform.h>

#include "etnaviv_drv.h"
#include "etnaviv_gpu.h"
#include "etnaviv_gem.h"
#include "etnaviv_mmu.h"
#ifdef CONFIG_DRM_ETNAVIV_REGISTER_LOGGING
static bool reglog;
MODULE_PARM_DESC(reglog, "Enable register read/write logging");
module_param(reglog, bool, 0600);
#else
#define reglog 0
#endif
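
/*
 * Map a platform MEM resource, looked up by name when one is given and by
 * index 0 otherwise, using devm_ioremap_resource() so the mapping is
 * released automatically on driver detach.  With reglog enabled the mapped
 * region is reported under the given debug name.
 */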
void __iomem *etnaviv_ioremap(struct platform_device *pdev, const char *name,
                const char *dbgname)
{
        struct resource *res;
        void __iomem *ptr;

        if (name)
                res = platform_get_resource_byname(pdev, IORESOURCE_MEM, name);
        else
                res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

        ptr = devm_ioremap_resource(&pdev->dev, res);
        if (IS_ERR(ptr)) {
                dev_err(&pdev->dev, "failed to ioremap %s: %ld\n", name,
                        PTR_ERR(ptr));
                return ptr;
        }

        if (reglog)
                dev_printk(KERN_DEBUG, &pdev->dev, "IO:region %s 0x%p %08zx\n",
                           dbgname, ptr, (size_t)resource_size(res));

        return ptr;
}
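
/* Register accessors that trace every write/read when reglog is set. */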
void etnaviv_writel(u32 data, void __iomem *addr)
{
        if (reglog)
                printk(KERN_DEBUG "IO:W %p %08x\n", addr, data);

        writel(data, addr);
}

u32 etnaviv_readl(const void __iomem *addr)
{
        u32 val = readl(addr);

        if (reglog)
                printk(KERN_DEBUG "IO:R %p %08x\n", addr, val);

        return val;
}
/*
 * DRM operations:
 */
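
/*
 * Bring up every GPU core bound to this DRM device; a core that fails
 * etnaviv_gpu_init() is dropped from the pipe array rather than failing
 * the whole device.
 */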
static void load_gpu(struct drm_device *dev)
{
        struct etnaviv_drm_private *priv = dev->dev_private;
        unsigned int i;

        for (i = 0; i < ETNA_MAX_PIPES; i++) {
                struct etnaviv_gpu *g = priv->gpu[i];

                if (g) {
                        int ret;

                        ret = etnaviv_gpu_init(g);
                        if (ret)
                                priv->gpu[i] = NULL;
                }
        }
}
static int etnaviv_open(struct drm_device *dev, struct drm_file *file)
{
        struct etnaviv_file_private *ctx;

        ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
        if (!ctx)
                return -ENOMEM;

        file->driver_priv = ctx;

        return 0;
}
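
/*
 * Drop any references the GPUs still hold on this file's context before
 * the per-file private data is freed.
 */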
static void etnaviv_preclose(struct drm_device *dev, struct drm_file *file)
{
        struct etnaviv_drm_private *priv = dev->dev_private;
        struct etnaviv_file_private *ctx = file->driver_priv;
        unsigned int i;

        for (i = 0; i < ETNA_MAX_PIPES; i++) {
                struct etnaviv_gpu *gpu = priv->gpu[i];

                if (gpu) {
                        mutex_lock(&gpu->lock);
                        if (gpu->lastctx == ctx)
                                gpu->lastctx = NULL;
                        mutex_unlock(&gpu->lock);
                }
        }

        kfree(ctx);
}
/*
 * DRM debugfs:
 */
#ifdef CONFIG_DEBUG_FS
static int etnaviv_gem_show(struct drm_device *dev, struct seq_file *m)
{
        struct etnaviv_drm_private *priv = dev->dev_private;

        etnaviv_gem_describe_objects(priv, m);

        return 0;
}

static int etnaviv_mm_show(struct drm_device *dev, struct seq_file *m)
{
        int ret;

        read_lock(&dev->vma_offset_manager->vm_lock);
        ret = drm_mm_dump_table(m, &dev->vma_offset_manager->vm_addr_space_mm);
        read_unlock(&dev->vma_offset_manager->vm_lock);

        return ret;
}
static int etnaviv_mmu_show(struct etnaviv_gpu *gpu, struct seq_file *m)
{
        seq_printf(m, "Active Objects (%s):\n", dev_name(gpu->dev));

        mutex_lock(&gpu->mmu->lock);
        drm_mm_dump_table(m, &gpu->mmu->mm);
        mutex_unlock(&gpu->mmu->lock);

        return 0;
}
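
/* Dump the GPU command buffer to debugfs, four 32-bit words per line. */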
static void etnaviv_buffer_dump(struct etnaviv_gpu *gpu, struct seq_file *m)
{
        struct etnaviv_cmdbuf *buf = gpu->buffer;
        u32 size = buf->size;
        u32 *ptr = buf->vaddr;
        u32 i;

        seq_printf(m, "virt %p - phys 0x%llx - free 0x%08x\n",
                        buf->vaddr, (u64)buf->paddr, size - buf->user_size);

        for (i = 0; i < size / 4; i++) {
                if (i && !(i % 4))
                        seq_puts(m, "\n");
                if (i % 4 == 0)
                        seq_printf(m, "\t0x%p: ", ptr + i);
                seq_printf(m, "%08x ", *(ptr + i));
        }
        seq_puts(m, "\n");
}
static int etnaviv_ring_show(struct etnaviv_gpu *gpu, struct seq_file *m)
{
        seq_printf(m, "Ring Buffer (%s): ", dev_name(gpu->dev));

        mutex_lock(&gpu->lock);
        etnaviv_buffer_dump(gpu, m);
        mutex_unlock(&gpu->lock);

        return 0;
}

static int show_unlocked(struct seq_file *m, void *arg)
{
        struct drm_info_node *node = (struct drm_info_node *) m->private;
        struct drm_device *dev = node->minor->dev;
        int (*show)(struct drm_device *dev, struct seq_file *m) =
                        node->info_ent->data;

        return show(dev, m);
}
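
/*
 * debugfs helper: run the node's show callback once for each present GPU
 * pipe, stopping on the first error.
 */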
static int show_each_gpu(struct seq_file *m, void *arg)
{
        struct drm_info_node *node = (struct drm_info_node *) m->private;
        struct drm_device *dev = node->minor->dev;
        struct etnaviv_drm_private *priv = dev->dev_private;
        struct etnaviv_gpu *gpu;
        int (*show)(struct etnaviv_gpu *gpu, struct seq_file *m) =
                        node->info_ent->data;
        unsigned int i;
        int ret = 0;

        for (i = 0; i < ETNA_MAX_PIPES; i++) {
                gpu = priv->gpu[i];
                if (!gpu)
                        continue;

                ret = show(gpu, m);
                if (ret < 0)
                        break;
        }

        return ret;
}
static struct drm_info_list etnaviv_debugfs_list[] = {
        {"gpu",  show_each_gpu, 0, etnaviv_gpu_debugfs},
        {"gem",  show_unlocked, 0, etnaviv_gem_show},
        {"mm",   show_unlocked, 0, etnaviv_mm_show},
        {"mmu",  show_each_gpu, 0, etnaviv_mmu_show},
        {"ring", show_each_gpu, 0, etnaviv_ring_show},
};
static int etnaviv_debugfs_init(struct drm_minor *minor)
{
        struct drm_device *dev = minor->dev;
        int ret;

        ret = drm_debugfs_create_files(etnaviv_debugfs_list,
                        ARRAY_SIZE(etnaviv_debugfs_list),
                        minor->debugfs_root, minor);

        if (ret) {
                dev_err(dev->dev, "could not install etnaviv_debugfs_list\n");
                return ret;
        }

        return ret;
}

static void etnaviv_debugfs_cleanup(struct drm_minor *minor)
{
        drm_debugfs_remove_files(etnaviv_debugfs_list,
                        ARRAY_SIZE(etnaviv_debugfs_list), minor);
}
#endif
/*
 * DRM ioctls:
 */
static int etnaviv_ioctl_get_param(struct drm_device *dev, void *data,
                struct drm_file *file)
{
        struct etnaviv_drm_private *priv = dev->dev_private;
        struct drm_etnaviv_param *args = data;
        struct etnaviv_gpu *gpu;

        if (args->pipe >= ETNA_MAX_PIPES)
                return -EINVAL;

        gpu = priv->gpu[args->pipe];
        if (!gpu)
                return -ENXIO;

        return etnaviv_gpu_get_param(gpu, args->param, &args->value);
}

static int etnaviv_ioctl_gem_new(struct drm_device *dev, void *data,
                struct drm_file *file)
{
        struct drm_etnaviv_gem_new *args = data;

        if (args->flags & ~(ETNA_BO_CACHED | ETNA_BO_WC | ETNA_BO_UNCACHED |
                            ETNA_BO_FORCE_MMU))
                return -EINVAL;

        return etnaviv_gem_new_handle(dev, file, args->size,
                        args->flags, &args->handle);
}
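
/* Build a kernel struct timespec from the tv_sec/tv_nsec fields of the
 * ioctl timeout argument. */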
#define TS(t) ((struct timespec){ \
        .tv_sec = (t).tv_sec, \
        .tv_nsec = (t).tv_nsec \
})
static int etnaviv_ioctl_gem_cpu_prep(struct drm_device *dev, void *data,
                struct drm_file *file)
{
        struct drm_etnaviv_gem_cpu_prep *args = data;
        struct drm_gem_object *obj;
        int ret;

        if (args->op & ~(ETNA_PREP_READ | ETNA_PREP_WRITE | ETNA_PREP_NOSYNC))
                return -EINVAL;

        obj = drm_gem_object_lookup(file, args->handle);
        if (!obj)
                return -ENOENT;

        ret = etnaviv_gem_cpu_prep(obj, args->op, &TS(args->timeout));

        drm_gem_object_unreference_unlocked(obj);

        return ret;
}

static int etnaviv_ioctl_gem_cpu_fini(struct drm_device *dev, void *data,
                struct drm_file *file)
{
        struct drm_etnaviv_gem_cpu_fini *args = data;
        struct drm_gem_object *obj;
        int ret;

        if (args->flags)
                return -EINVAL;

        obj = drm_gem_object_lookup(file, args->handle);
        if (!obj)
                return -ENOENT;

        ret = etnaviv_gem_cpu_fini(obj);

        drm_gem_object_unreference_unlocked(obj);

        return ret;
}
static int etnaviv_ioctl_gem_info(struct drm_device *dev, void *data,
                struct drm_file *file)
{
        struct drm_etnaviv_gem_info *args = data;
        struct drm_gem_object *obj;
        int ret;

        if (args->pad)
                return -EINVAL;

        obj = drm_gem_object_lookup(file, args->handle);
        if (!obj)
                return -ENOENT;

        ret = etnaviv_gem_mmap_offset(obj, &args->offset);
        drm_gem_object_unreference_unlocked(obj);

        return ret;
}

static int etnaviv_ioctl_wait_fence(struct drm_device *dev, void *data,
                struct drm_file *file)
{
        struct drm_etnaviv_wait_fence *args = data;
        struct etnaviv_drm_private *priv = dev->dev_private;
        struct timespec *timeout = &TS(args->timeout);
        struct etnaviv_gpu *gpu;

        if (args->flags & ~(ETNA_WAIT_NONBLOCK))
                return -EINVAL;

        if (args->pipe >= ETNA_MAX_PIPES)
                return -EINVAL;

        gpu = priv->gpu[args->pipe];
        if (!gpu)
                return -ENXIO;

        if (args->flags & ETNA_WAIT_NONBLOCK)
                timeout = NULL;

        return etnaviv_gpu_wait_fence_interruptible(gpu, args->fence,
                                                    timeout);
}
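
/*
 * GEM_USERPTR: wrap a page-aligned user memory range in a GEM object.
 * The range must carry at least one of the READ/WRITE flags, fit the
 * 32-bit size field, and pass access_ok() for the requested direction.
 */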
static int etnaviv_ioctl_gem_userptr(struct drm_device *dev, void *data,
        struct drm_file *file)
{
        struct drm_etnaviv_gem_userptr *args = data;
        int access;

        if (args->flags & ~(ETNA_USERPTR_READ|ETNA_USERPTR_WRITE) ||
            args->flags == 0)
                return -EINVAL;

        if (offset_in_page(args->user_ptr | args->user_size) ||
            (uintptr_t)args->user_ptr != args->user_ptr ||
            (u32)args->user_size != args->user_size ||
            args->user_ptr & ~PAGE_MASK)
                return -EINVAL;

        if (args->flags & ETNA_USERPTR_WRITE)
                access = VERIFY_WRITE;
        else
                access = VERIFY_READ;

        if (!access_ok(access, (void __user *)(unsigned long)args->user_ptr,
                       args->user_size))
                return -EFAULT;

        return etnaviv_gem_new_userptr(dev, file, args->user_ptr,
                                       args->user_size, args->flags,
                                       &args->handle);
}
static int etnaviv_ioctl_gem_wait(struct drm_device *dev, void *data,
        struct drm_file *file)
{
        struct etnaviv_drm_private *priv = dev->dev_private;
        struct drm_etnaviv_gem_wait *args = data;
        struct timespec *timeout = &TS(args->timeout);
        struct drm_gem_object *obj;
        struct etnaviv_gpu *gpu;
        int ret;

        if (args->flags & ~(ETNA_WAIT_NONBLOCK))
                return -EINVAL;

        if (args->pipe >= ETNA_MAX_PIPES)
                return -EINVAL;

        gpu = priv->gpu[args->pipe];
        if (!gpu)
                return -ENXIO;

        obj = drm_gem_object_lookup(file, args->handle);
        if (!obj)
                return -ENOENT;

        if (args->flags & ETNA_WAIT_NONBLOCK)
                timeout = NULL;

        ret = etnaviv_gem_wait_bo(gpu, obj, timeout);

        drm_gem_object_unreference_unlocked(obj);

        return ret;
}
static const struct drm_ioctl_desc etnaviv_ioctls[] = {
#define ETNA_IOCTL(n, func, flags) \
        DRM_IOCTL_DEF_DRV(ETNAVIV_##n, etnaviv_ioctl_##func, flags)
        ETNA_IOCTL(GET_PARAM,    get_param,    DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_NEW,      gem_new,      DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_INFO,     gem_info,     DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_CPU_PREP, gem_cpu_prep, DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_CPU_FINI, gem_cpu_fini, DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_SUBMIT,   gem_submit,   DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(WAIT_FENCE,   wait_fence,   DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_USERPTR,  gem_userptr,  DRM_AUTH|DRM_RENDER_ALLOW),
        ETNA_IOCTL(GEM_WAIT,     gem_wait,     DRM_AUTH|DRM_RENDER_ALLOW),
};
static const struct vm_operations_struct vm_ops = {
        .fault = etnaviv_gem_fault,
        .open = drm_gem_vm_open,
        .close = drm_gem_vm_close,
};

static const struct file_operations fops = {
        .owner = THIS_MODULE,
        .open = drm_open,
        .release = drm_release,
        .unlocked_ioctl = drm_ioctl,
#ifdef CONFIG_COMPAT
        .compat_ioctl = drm_compat_ioctl,
#endif
        .poll = drm_poll,
        .read = drm_read,
        .llseek = no_llseek,
        .mmap = etnaviv_gem_mmap,
};
static struct drm_driver etnaviv_drm_driver = {
        .driver_features = DRIVER_GEM |
                           DRIVER_PRIME |
                           DRIVER_RENDER,
        .open = etnaviv_open,
        .preclose = etnaviv_preclose,
        .gem_free_object_unlocked = etnaviv_gem_free_object,
        .gem_vm_ops = &vm_ops,
        .prime_handle_to_fd = drm_gem_prime_handle_to_fd,
        .prime_fd_to_handle = drm_gem_prime_fd_to_handle,
        .gem_prime_export = drm_gem_prime_export,
        .gem_prime_import = drm_gem_prime_import,
        .gem_prime_pin = etnaviv_gem_prime_pin,
        .gem_prime_unpin = etnaviv_gem_prime_unpin,
        .gem_prime_get_sg_table = etnaviv_gem_prime_get_sg_table,
        .gem_prime_import_sg_table = etnaviv_gem_prime_import_sg_table,
        .gem_prime_vmap = etnaviv_gem_prime_vmap,
        .gem_prime_vunmap = etnaviv_gem_prime_vunmap,
#ifdef CONFIG_DEBUG_FS
        .debugfs_init = etnaviv_debugfs_init,
        .debugfs_cleanup = etnaviv_debugfs_cleanup,
#endif
        .ioctls = etnaviv_ioctls,
        .num_ioctls = DRM_ETNAVIV_NUM_IOCTLS,
        .fops = &fops,
        .name = "etnaviv",
        .desc = "etnaviv DRM",
        .date = "20151214",
        .major = 1,
        .minor = 0,
};
/*
 * Platform driver:
 */
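
/*
 * Master bind callback: allocate the DRM device and driver-private state,
 * bind all GPU core components, initialise them and register the device.
 * The error labels unwind these steps in reverse order.
 */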
static int etnaviv_bind(struct device *dev)
{
        struct etnaviv_drm_private *priv;
        struct drm_device *drm;
        int ret;

        drm = drm_dev_alloc(&etnaviv_drm_driver, dev);
        if (IS_ERR(drm))
                return PTR_ERR(drm);

        priv = kzalloc(sizeof(*priv), GFP_KERNEL);
        if (!priv) {
                dev_err(dev, "failed to allocate private data\n");
                ret = -ENOMEM;
                goto out_unref;
        }
        drm->dev_private = priv;

        priv->wq = alloc_ordered_workqueue("etnaviv", 0);
        if (!priv->wq) {
                ret = -ENOMEM;
                goto out_wq;
        }

        mutex_init(&priv->gem_lock);
        INIT_LIST_HEAD(&priv->gem_list);
        priv->num_gpus = 0;

        dev_set_drvdata(dev, drm);

        ret = component_bind_all(dev, drm);
        if (ret < 0)
                goto out_bind;

        load_gpu(drm);

        ret = drm_dev_register(drm, 0);
        if (ret)
                goto out_register;

        return 0;

out_register:
        component_unbind_all(dev, drm);
out_bind:
        flush_workqueue(priv->wq);
        destroy_workqueue(priv->wq);
out_wq:
        kfree(priv);
out_unref:
        drm_dev_unref(drm);

        return ret;
}
static void etnaviv_unbind(struct device *dev)
{
        struct drm_device *drm = dev_get_drvdata(dev);
        struct etnaviv_drm_private *priv = drm->dev_private;

        drm_dev_unregister(drm);

        flush_workqueue(priv->wq);
        destroy_workqueue(priv->wq);

        component_unbind_all(dev, drm);

        drm->dev_private = NULL;
        kfree(priv);

        drm_put_dev(drm);
}
static const struct component_master_ops etnaviv_master_ops = {
        .bind = etnaviv_bind,
        .unbind = etnaviv_unbind,
};

static int compare_of(struct device *dev, void *data)
{
        struct device_node *np = data;

        return dev->of_node == np;
}

static int compare_str(struct device *dev, void *data)
{
        return !strcmp(dev_name(dev), data);
}
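
/*
 * Build the component match list either from the "cores" phandles of the
 * DT subsystem node or, on non-DT platforms, from a NULL-terminated list
 * of device names passed as platform data.
 */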
static int etnaviv_pdev_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct device_node *node = dev->of_node;
        struct component_match *match = NULL;

        dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(32));

        if (node) {
                struct device_node *core_node;
                int i;

                for (i = 0; ; i++) {
                        core_node = of_parse_phandle(node, "cores", i);
                        if (!core_node)
                                break;

                        component_match_add(&pdev->dev, &match, compare_of,
                                            core_node);
                        of_node_put(core_node);
                }
        } else if (dev->platform_data) {
                char **names = dev->platform_data;
                unsigned i;

                for (i = 0; names[i]; i++)
                        component_match_add(dev, &match, compare_str, names[i]);
        }

        return component_master_add_with_match(dev, &etnaviv_master_ops, match);
}
static int etnaviv_pdev_remove(struct platform_device *pdev)
{
        component_master_del(&pdev->dev, &etnaviv_master_ops);

        return 0;
}

static const struct of_device_id dt_match[] = {
        { .compatible = "fsl,imx-gpu-subsystem" },
        { .compatible = "marvell,dove-gpu-subsystem" },
        { /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, dt_match);
static struct platform_driver etnaviv_platform_driver = {
        .probe = etnaviv_pdev_probe,
        .remove = etnaviv_pdev_remove,
        .driver = {
                .name = "etnaviv",
                .of_match_table = dt_match,
        },
};
static int __init etnaviv_init(void)
{
        int ret;

        etnaviv_validate_init();

        ret = platform_driver_register(&etnaviv_gpu_driver);
        if (ret != 0)
                return ret;

        ret = platform_driver_register(&etnaviv_platform_driver);
        if (ret != 0)
                platform_driver_unregister(&etnaviv_gpu_driver);

        return ret;
}
module_init(etnaviv_init);

static void __exit etnaviv_exit(void)
{
        platform_driver_unregister(&etnaviv_gpu_driver);
        platform_driver_unregister(&etnaviv_platform_driver);
}
module_exit(etnaviv_exit);
MODULE_AUTHOR("Christian Gmeiner <christian.gmeiner@gmail.com>");
MODULE_AUTHOR("Russell King <rmk+kernel@arm.linux.org.uk>");
MODULE_AUTHOR("Lucas Stach <l.stach@pengutronix.de>");
MODULE_DESCRIPTION("etnaviv DRM Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:etnaviv");