/*
 * Copyright (C) 2014 Christian Gmeiner <christian.gmeiner@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/iommu.h>
#include <linux/platform_device.h>
#include <linux/sizes.h>
#include <linux/slab.h>
#include <linux/dma-mapping.h>
#include <linux/bitops.h>

#include "etnaviv_gpu.h"
#include "etnaviv_mmu.h"
#include "etnaviv_iommu.h"
#include "state_hi.xml.h"

#define PT_SIZE		SZ_2M
#define PT_ENTRIES	(PT_SIZE / sizeof(u32))

#define GPU_MEM_START	0x80000000
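
/*
 * The v1 MMU uses a single flat page table: one 32-bit entry per 4K page,
 * covering PT_ENTRIES pages of GPU virtual address space starting at
 * GPU_MEM_START.
 */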
struct etnaviv_iommu_domain_pgtable {
	u32 *pgtable;
	dma_addr_t paddr;
};

struct etnaviv_iommu_domain {
	struct iommu_domain domain;
	struct device *dev;
	void *bad_page_cpu;
	dma_addr_t bad_page_dma;
	struct etnaviv_iommu_domain_pgtable pgtable;
	spinlock_t map_lock;
};

static struct etnaviv_iommu_domain *to_etnaviv_domain(struct iommu_domain *domain)
{
	return container_of(domain, struct etnaviv_iommu_domain, domain);
}

static int pgtable_alloc(struct etnaviv_iommu_domain_pgtable *pgtable,
			 size_t size)
{
	pgtable->pgtable = dma_alloc_coherent(NULL, size, &pgtable->paddr,
					      GFP_KERNEL);
	if (!pgtable->pgtable)
		return -ENOMEM;

	return 0;
}

static void pgtable_free(struct etnaviv_iommu_domain_pgtable *pgtable,
			 size_t size)
{
	dma_free_coherent(NULL, size, pgtable->pgtable, pgtable->paddr);
}

static u32 pgtable_read(struct etnaviv_iommu_domain_pgtable *pgtable,
			unsigned long iova)
{
	/* calculate index into page table */
	unsigned int index = (iova - GPU_MEM_START) / SZ_4K;
	u32 paddr;

	paddr = pgtable->pgtable[index];

	return paddr;
}

static void pgtable_write(struct etnaviv_iommu_domain_pgtable *pgtable,
			  unsigned long iova, phys_addr_t paddr)
{
	/* calculate index into page table */
	unsigned int index = (iova - GPU_MEM_START) / SZ_4K;

	pgtable->pgtable[index] = paddr;
}
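
/*
 * Allocate the scratch "bad" page and the page table, and point every
 * page-table entry at the bad page so that unmapped GPU accesses hit
 * known memory instead of arbitrary physical addresses.
 */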
static int __etnaviv_iommu_init(struct etnaviv_iommu_domain *etnaviv_domain)
{
	u32 *p;
	int ret, i;

	etnaviv_domain->bad_page_cpu = dma_alloc_coherent(etnaviv_domain->dev,
							  SZ_4K,
							  &etnaviv_domain->bad_page_dma,
							  GFP_KERNEL);
	if (!etnaviv_domain->bad_page_cpu)
		return -ENOMEM;

	/* fill the bad page with a recognisable poison pattern */
	p = etnaviv_domain->bad_page_cpu;
	for (i = 0; i < SZ_4K / 4; i++)
		*p++ = 0xdead55aa;

	ret = pgtable_alloc(&etnaviv_domain->pgtable, PT_SIZE);
	if (ret < 0) {
		dma_free_coherent(etnaviv_domain->dev, SZ_4K,
				  etnaviv_domain->bad_page_cpu,
				  etnaviv_domain->bad_page_dma);
		return ret;
	}

	for (i = 0; i < PT_ENTRIES; i++)
		etnaviv_domain->pgtable.pgtable[i] =
			etnaviv_domain->bad_page_dma;

	spin_lock_init(&etnaviv_domain->map_lock);

	return 0;
}

static void etnaviv_domain_free(struct iommu_domain *domain)
{
	struct etnaviv_iommu_domain *etnaviv_domain = to_etnaviv_domain(domain);

	pgtable_free(&etnaviv_domain->pgtable, PT_SIZE);

	dma_free_coherent(etnaviv_domain->dev, SZ_4K,
			  etnaviv_domain->bad_page_cpu,
			  etnaviv_domain->bad_page_dma);

	kfree(etnaviv_domain);
}

static int etnaviv_iommuv1_map(struct iommu_domain *domain, unsigned long iova,
			       phys_addr_t paddr, size_t size, int prot)
{
	struct etnaviv_iommu_domain *etnaviv_domain = to_etnaviv_domain(domain);

	if (size != SZ_4K)
		return -EINVAL;

	spin_lock(&etnaviv_domain->map_lock);
	pgtable_write(&etnaviv_domain->pgtable, iova, paddr);
	spin_unlock(&etnaviv_domain->map_lock);

	return 0;
}
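
/*
 * Unmapping does not clear the page-table entry; it is pointed back at the
 * bad page so the GPU never walks a stale translation.
 */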
static size_t etnaviv_iommuv1_unmap(struct iommu_domain *domain,
				    unsigned long iova, size_t size)
{
	struct etnaviv_iommu_domain *etnaviv_domain = to_etnaviv_domain(domain);

	if (size != SZ_4K)
		return -EINVAL;

	spin_lock(&etnaviv_domain->map_lock);
	pgtable_write(&etnaviv_domain->pgtable, iova,
		      etnaviv_domain->bad_page_dma);
	spin_unlock(&etnaviv_domain->map_lock);

	return SZ_4K;
}

static phys_addr_t etnaviv_iommu_iova_to_phys(struct iommu_domain *domain,
					      dma_addr_t iova)
{
	struct etnaviv_iommu_domain *etnaviv_domain = to_etnaviv_domain(domain);

	return pgtable_read(&etnaviv_domain->pgtable, iova);
}

static size_t etnaviv_iommuv1_dump_size(struct iommu_domain *domain)
{
	return PT_SIZE;
}

static void etnaviv_iommuv1_dump(struct iommu_domain *domain, void *buf)
{
	struct etnaviv_iommu_domain *etnaviv_domain = to_etnaviv_domain(domain);

	memcpy(buf, etnaviv_domain->pgtable.pgtable, PT_SIZE);
}

static struct etnaviv_iommu_ops etnaviv_iommu_ops = {
	.ops = {
		.domain_free = etnaviv_domain_free,
		.map = etnaviv_iommuv1_map,
		.unmap = etnaviv_iommuv1_unmap,
		.iova_to_phys = etnaviv_iommu_iova_to_phys,
		.pgsize_bitmap = SZ_4K,
	},
	.dump_size = etnaviv_iommuv1_dump_size,
	.dump = etnaviv_iommuv1_dump,
};
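
/*
 * Program the memory controller with the base address and the physical
 * address of the page table for each MMU client (FE, TX, PE, PEZ, RA),
 * e.g. when the GPU MMU state has to be restored after a reset.
 */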
void etnaviv_iommuv1_restore(struct etnaviv_gpu *gpu)
{
	struct etnaviv_iommu_domain *etnaviv_domain =
			to_etnaviv_domain(gpu->mmu->domain);
	u32 pgtable;

	/* set base addresses */
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_RA, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_FE, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_TX, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PEZ, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PE, gpu->memory_base);

	/* set page table address in MC */
	pgtable = (u32)etnaviv_domain->pgtable.paddr;

	gpu_write(gpu, VIVS_MC_MMU_FE_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_TX_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_PE_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_PEZ_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_RA_PAGE_TABLE, pgtable);
}

struct iommu_domain *etnaviv_iommuv1_domain_alloc(struct etnaviv_gpu *gpu)
{
	struct etnaviv_iommu_domain *etnaviv_domain;
	int ret;

	etnaviv_domain = kzalloc(sizeof(*etnaviv_domain), GFP_KERNEL);
	if (!etnaviv_domain)
		return NULL;

	etnaviv_domain->dev = gpu->dev;

	etnaviv_domain->domain.type = __IOMMU_DOMAIN_PAGING;
	etnaviv_domain->domain.ops = &etnaviv_iommu_ops.ops;
	etnaviv_domain->domain.pgsize_bitmap = SZ_4K;
	etnaviv_domain->domain.geometry.aperture_start = GPU_MEM_START;
	etnaviv_domain->domain.geometry.aperture_end = GPU_MEM_START +
						       PT_ENTRIES * SZ_4K - 1;

	ret = __etnaviv_iommu_init(etnaviv_domain);
	if (ret)
		goto out_free;

	return &etnaviv_domain->domain;

out_free:
	kfree(etnaviv_domain);
	return NULL;
}