// SPDX-License-Identifier: GPL-2.0+
/* Copyright (C) 2014-2018 Broadcom */
/**
 * DOC: Broadcom V3D Graphics Driver
 *
 * This driver supports the Broadcom V3D 3.3 and 4.1 OpenGL ES GPUs.
 * For V3D 2.x support, see the VC4 driver.
 *
 * The V3D GPU includes a tiled renderer (composed of the bin and render
 * pipelines), the TFU (texture formatting unit), and the CSD (compute
 * shader dispatch).
 */
#include <linux/clk.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <drm/drm_drv.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_managed.h>
#include <uapi/drm/v3d_drm.h>

#include "v3d_drv.h"
#include "v3d_regs.h"
#define DRIVER_NAME "v3d"
#define DRIVER_DESC "Broadcom V3D graphics"
#define DRIVER_DATE "20180419"
#define DRIVER_MAJOR 1
#define DRIVER_MINOR 0
#define DRIVER_PATCHLEVEL 0
static int v3d_get_param_ioctl(struct drm_device *dev, void *data,
			       struct drm_file *file_priv)
{
	struct v3d_dev *v3d = to_v3d_dev(dev);
	struct drm_v3d_get_param *args = data;
	int ret;
	static const u32 reg_map[] = {
		[DRM_V3D_PARAM_V3D_UIFCFG] = V3D_HUB_UIFCFG,
		[DRM_V3D_PARAM_V3D_HUB_IDENT1] = V3D_HUB_IDENT1,
		[DRM_V3D_PARAM_V3D_HUB_IDENT2] = V3D_HUB_IDENT2,
		[DRM_V3D_PARAM_V3D_HUB_IDENT3] = V3D_HUB_IDENT3,
		[DRM_V3D_PARAM_V3D_CORE0_IDENT0] = V3D_CTL_IDENT0,
		[DRM_V3D_PARAM_V3D_CORE0_IDENT1] = V3D_CTL_IDENT1,
		[DRM_V3D_PARAM_V3D_CORE0_IDENT2] = V3D_CTL_IDENT2,
	};

	if (args->pad != 0)
		return -EINVAL;

	/* Note that DRM_V3D_PARAM_V3D_CORE0_IDENT0 is 0, so we need
	 * to explicitly allow it in the "register is in our
	 * parameter map" check.
	 */
	if (args->param < ARRAY_SIZE(reg_map) &&
	    (reg_map[args->param] ||
	     args->param == DRM_V3D_PARAM_V3D_CORE0_IDENT0)) {
		u32 offset = reg_map[args->param];

		if (args->value != 0)
			return -EINVAL;

		ret = pm_runtime_get_sync(v3d->drm.dev);
		if (ret < 0)
			return ret;

		if (args->param >= DRM_V3D_PARAM_V3D_CORE0_IDENT0 &&
		    args->param <= DRM_V3D_PARAM_V3D_CORE0_IDENT2) {
			args->value = V3D_CORE_READ(0, offset);
		} else {
			args->value = V3D_READ(offset);
		}

		pm_runtime_mark_last_busy(v3d->drm.dev);
		pm_runtime_put_autosuspend(v3d->drm.dev);
		return 0;
	}

	switch (args->param) {
	case DRM_V3D_PARAM_SUPPORTS_TFU:
		args->value = 1;
		return 0;
	case DRM_V3D_PARAM_SUPPORTS_CSD:
		args->value = v3d_has_csd(v3d);
		return 0;
	case DRM_V3D_PARAM_SUPPORTS_CACHE_FLUSH:
		args->value = 1;
		return 0;
	default:
		DRM_DEBUG("Unknown parameter %d\n", args->param);
		return -EINVAL;
	}
}
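/*
 * Illustrative userspace sketch (not part of this driver): a parameter is
 * queried through the GET_PARAM ioctl declared in uapi/drm/v3d_drm.h.  The
 * use of libdrm's drmIoctl() below is an assumption about the caller's
 * environment; any ioctl(2) wrapper on the render node would work the same.
 *
 *	struct drm_v3d_get_param param = {
 *		.param = DRM_V3D_PARAM_SUPPORTS_CSD,
 *	};
 *
 *	if (drmIoctl(fd, DRM_IOCTL_V3D_GET_PARAM, &param) == 0 && param.value)
 *		;	// compute shader dispatch is available
 */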
static int
v3d_open(struct drm_device *dev, struct drm_file *file)
{
	struct v3d_dev *v3d = to_v3d_dev(dev);
	struct v3d_file_priv *v3d_priv;
	struct drm_gpu_scheduler *sched;
	int i;

	v3d_priv = kzalloc(sizeof(*v3d_priv), GFP_KERNEL);
	if (!v3d_priv)
		return -ENOMEM;

	v3d_priv->v3d = v3d;

	/* Each open file gets its own scheduler entity per queue, so the
	 * DRM GPU scheduler can arbitrate between clients.
	 */
	for (i = 0; i < V3D_MAX_QUEUES; i++) {
		sched = &v3d->queue[i].sched;
		drm_sched_entity_init(&v3d_priv->sched_entity[i],
				      DRM_SCHED_PRIORITY_NORMAL, &sched,
				      1, NULL);
	}

	file->driver_priv = v3d_priv;

	return 0;
}
static void
v3d_postclose(struct drm_device *dev, struct drm_file *file)
{
	struct v3d_file_priv *v3d_priv = file->driver_priv;
	enum v3d_queue q;

	for (q = 0; q < V3D_MAX_QUEUES; q++) {
		drm_sched_entity_destroy(&v3d_priv->sched_entity[q]);
	}

	kfree(v3d_priv);
}
DEFINE_DRM_GEM_FOPS(v3d_drm_fops);
/* DRM_AUTH is required on SUBMIT_CL for now, while we don't have GMP
 * protection between clients.  Note that render nodes would be able
 * to submit CLs that could access BOs from clients authenticated
 * with the master node.  The TFU doesn't use the GMP, so it would
 * need to stay DRM_AUTH until we do buffer size/offset validation.
 */
static const struct drm_ioctl_desc v3d_drm_ioctls[] = {
	DRM_IOCTL_DEF_DRV(V3D_SUBMIT_CL, v3d_submit_cl_ioctl, DRM_RENDER_ALLOW | DRM_AUTH),
	DRM_IOCTL_DEF_DRV(V3D_WAIT_BO, v3d_wait_bo_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(V3D_CREATE_BO, v3d_create_bo_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(V3D_MMAP_BO, v3d_mmap_bo_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(V3D_GET_PARAM, v3d_get_param_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(V3D_GET_BO_OFFSET, v3d_get_bo_offset_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(V3D_SUBMIT_TFU, v3d_submit_tfu_ioctl, DRM_RENDER_ALLOW | DRM_AUTH),
	DRM_IOCTL_DEF_DRV(V3D_SUBMIT_CSD, v3d_submit_csd_ioctl, DRM_RENDER_ALLOW | DRM_AUTH),
};
static const struct drm_driver v3d_drm_driver = {
	.driver_features = (DRIVER_GEM |
			    DRIVER_RENDER |
			    DRIVER_SYNCOBJ),

	.open = v3d_open,
	.postclose = v3d_postclose,

#if defined(CONFIG_DEBUG_FS)
	.debugfs_init = v3d_debugfs_init,
#endif

	.gem_create_object = v3d_create_object,
	.prime_handle_to_fd = drm_gem_prime_handle_to_fd,
	.prime_fd_to_handle = drm_gem_prime_fd_to_handle,
	.gem_prime_import_sg_table = v3d_prime_import_sg_table,
	.gem_prime_mmap = drm_gem_prime_mmap,

	.ioctls = v3d_drm_ioctls,
	.num_ioctls = ARRAY_SIZE(v3d_drm_ioctls),
	.fops = &v3d_drm_fops,

	.name = DRIVER_NAME,
	.desc = DRIVER_DESC,
	.date = DRIVER_DATE,
	.major = DRIVER_MAJOR,
	.minor = DRIVER_MINOR,
	.patchlevel = DRIVER_PATCHLEVEL,
};
static const struct of_device_id v3d_of_match[] = {
	{ .compatible = "brcm,7268-v3d" },
	{ .compatible = "brcm,7278-v3d" },
	{},
};
MODULE_DEVICE_TABLE(of, v3d_of_match);
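/*
 * Illustrative device tree node (an assumption for documentation purposes
 * only; the dt-bindings schema for the brcm,*-v3d compatibles is
 * authoritative, and the addresses below are placeholders).  The reg-names
 * correspond to the map_regs() calls in probe: "hub" and "core0" are always
 * mapped, "bridge" is used when no reset controller is provided, and "gca"
 * is mapped on pre-4.1 hardware.
 *
 *	v3d: gpu@f1200000 {
 *		compatible = "brcm,7268-v3d";
 *		reg = <0xf1200000 0x4000>, <0xf1208000 0x4000>;
 *		reg-names = "hub", "core0";
 *		interrupts = <0 78 4>;
 *	};
 */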
static int
map_regs(struct v3d_dev *v3d, void __iomem **regs, const char *name)
{
	struct resource *res =
		platform_get_resource_byname(v3d_to_pdev(v3d), IORESOURCE_MEM, name);

	*regs = devm_ioremap_resource(v3d->drm.dev, res);
	return PTR_ERR_OR_ZERO(*regs);
}
static int v3d_platform_drm_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct drm_device *drm;
	struct v3d_dev *v3d;
	int ret;
	u32 mmu_debug;
	u32 ident1;

	v3d = devm_drm_dev_alloc(dev, &v3d_drm_driver, struct v3d_dev, drm);
	if (IS_ERR(v3d))
		return PTR_ERR(v3d);

	drm = &v3d->drm;

	platform_set_drvdata(pdev, drm);

	ret = map_regs(v3d, &v3d->hub_regs, "hub");
	if (ret)
		return ret;

	ret = map_regs(v3d, &v3d->core_regs[0], "core0");
	if (ret)
		return ret;
	mmu_debug = V3D_READ(V3D_MMU_DEBUG_INFO);
	dev->coherent_dma_mask =
		DMA_BIT_MASK(30 + V3D_GET_FIELD(mmu_debug, V3D_MMU_PA_WIDTH));
	v3d->va_width = 30 + V3D_GET_FIELD(mmu_debug, V3D_MMU_VA_WIDTH);
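	/* The hub IDENT1 register encodes the technology version and
	 * revision; v3d->ver is TVER * 10 + REV, so e.g. V3D 4.1 hardware
	 * reports 41.  That value gates the version checks below.
	 */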
	ident1 = V3D_READ(V3D_HUB_IDENT1);
	v3d->ver = (V3D_GET_FIELD(ident1, V3D_HUB_IDENT1_TVER) * 10 +
		    V3D_GET_FIELD(ident1, V3D_HUB_IDENT1_REV));
	v3d->cores = V3D_GET_FIELD(ident1, V3D_HUB_IDENT1_NCORES);
	WARN_ON(v3d->cores > 1); /* multicore not yet implemented */
	v3d->reset = devm_reset_control_get_exclusive(dev, NULL);
	if (IS_ERR(v3d->reset)) {
		ret = PTR_ERR(v3d->reset);

		if (ret == -EPROBE_DEFER)
			return ret;

		v3d->reset = NULL;
		ret = map_regs(v3d, &v3d->bridge_regs, "bridge");
		if (ret) {
			dev_err(dev,
				"Failed to get reset control or bridge regs\n");
			return ret;
		}
	}

	if (v3d->ver < 41) {
		ret = map_regs(v3d, &v3d->gca_regs, "gca");
		if (ret)
			return ret;
	}

	v3d->mmu_scratch = dma_alloc_wc(dev, 4096, &v3d->mmu_scratch_paddr,
					GFP_KERNEL | __GFP_NOWARN | __GFP_ZERO);
	if (!v3d->mmu_scratch) {
		dev_err(dev, "Failed to allocate MMU scratch page\n");
		return -ENOMEM;
	}

	pm_runtime_use_autosuspend(dev);
	pm_runtime_set_autosuspend_delay(dev, 50);
	pm_runtime_enable(dev);

	ret = v3d_gem_init(drm);
	if (ret)
		goto dma_free;

	ret = v3d_irq_init(v3d);
	if (ret)
		goto gem_destroy;

	ret = drm_dev_register(drm, 0);
	if (ret)
		goto irq_disable;

	return 0;

irq_disable:
	v3d_irq_disable(v3d);
gem_destroy:
	v3d_gem_destroy(drm);
dma_free:
	dma_free_wc(dev, 4096, v3d->mmu_scratch, v3d->mmu_scratch_paddr);
	return ret;
}
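/* Undo probe in reverse order: unregister the DRM device, destroy the GEM
 * state, and free the MMU scratch page.  The register mappings and the
 * drm_device allocation are device-managed and released automatically.
 */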
static int v3d_platform_drm_remove(struct platform_device *pdev)
{
	struct drm_device *drm = platform_get_drvdata(pdev);
	struct v3d_dev *v3d = to_v3d_dev(drm);

	drm_dev_unregister(drm);

	v3d_gem_destroy(drm);

	dma_free_wc(v3d->drm.dev, 4096, v3d->mmu_scratch,
		    v3d->mmu_scratch_paddr);

	return 0;
}
static struct platform_driver v3d_platform_driver = {
	.probe		= v3d_platform_drm_probe,
	.remove		= v3d_platform_drm_remove,
	.driver		= {
		.name	= "v3d",
		.of_match_table = v3d_of_match,
	},
};

module_platform_driver(v3d_platform_driver);
MODULE_ALIAS("platform:v3d-drm");
MODULE_DESCRIPTION("Broadcom V3D DRM Driver");
MODULE_AUTHOR("Eric Anholt <eric@anholt.net>");
MODULE_LICENSE("GPL v2");