/* SPDX-License-Identifier: MIT */
#ifndef __NVKM_MMU_H__
#define __NVKM_MMU_H__
#include <core/subdev.h>

struct nvkm_vma {
	struct list_head head;
	struct rb_node tree;
	u64 addr;
	u64 size:50;
	bool mapref:1; /* PTs (de)referenced on (un)map (vs pre-allocated). */
	bool sparse:1; /* Unmapped PDEs/PTEs will not trigger MMU faults. */
#define NVKM_VMA_PAGE_NONE 7
	u8   page:3; /* Requested page type (index, or NONE for automatic). */
	u8   refd:3; /* Current page type (index, or NONE for unreferenced). */
	bool used:1; /* Region allocated. */
	bool part:1; /* Region was split from an allocated region by map(). */
	bool user:1; /* Region user-allocated. */
	bool busy:1; /* Region busy (for temporarily preventing user access). */
	bool mapped:1; /* Region contains valid pages. */
	struct nvkm_memory *memory; /* Memory currently mapped into VMA. */
	struct nvkm_tags *tags; /* Compression tag reference. */
};
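
/*
 * Illustrative sketch, not part of the original header: how the state
 * bits above are read.  A region with live translations has "used",
 * "mapped" and a backing object set; refd == NVKM_VMA_PAGE_NONE means
 * no page tables are currently referenced for the region.
 *
 *	bool live = vma->used && vma->mapped && vma->memory != NULL;
 *	bool unreferenced = vma->refd == NVKM_VMA_PAGE_NONE;
 */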

struct nvkm_vmm {
	const struct nvkm_vmm_func *func;
	struct nvkm_mmu *mmu;
	const char *name;
	u32 debug;
	struct kref kref;
	struct mutex mutex;

	u64 start;
	u64 limit;

	struct nvkm_vmm_pt *pd;
	struct list_head join;

	struct list_head list;
	struct rb_root free;
	struct rb_root root;

	bool bootstrapped;
	atomic_t engref[NVKM_SUBDEV_NR];

	dma_addr_t null;
	void *nullp;

	bool replay;
};

int nvkm_vmm_new(struct nvkm_device *, u64 addr, u64 size, void *argv, u32 argc,
		 struct lock_class_key *, const char *name, struct nvkm_vmm **);
struct nvkm_vmm *nvkm_vmm_ref(struct nvkm_vmm *);
void nvkm_vmm_unref(struct nvkm_vmm **);
int nvkm_vmm_boot(struct nvkm_vmm *);
int nvkm_vmm_join(struct nvkm_vmm *, struct nvkm_memory *inst);
void nvkm_vmm_part(struct nvkm_vmm *, struct nvkm_memory *inst);
int nvkm_vmm_get(struct nvkm_vmm *, u8 page, u64 size, struct nvkm_vma **);
void nvkm_vmm_put(struct nvkm_vmm *, struct nvkm_vma **);
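
/*
 * Illustrative usage sketch, not part of the original header.  The page
 * shift (12, i.e. 4KiB) and the surrounding error handling are
 * assumptions.  nvkm_vmm_get() allocates a region of the address space;
 * nvkm_vmm_put() releases it and clears the caller's pointer.
 *
 *	struct nvkm_vma *vma = NULL;
 *	int ret = nvkm_vmm_get(vmm, 12, 0x1000, &vma);
 *	if (ret)
 *		return ret;
 *	...
 *	nvkm_vmm_put(vmm, &vma);
 */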

struct nvkm_vmm_map {
	struct nvkm_memory *memory;
	u64 offset;

	struct nvkm_mm_node *mem;
	struct scatterlist *sgl;
	dma_addr_t *dma;
	u64 *pfn;
	u64 off;

	const struct nvkm_vmm_page *page;

	struct nvkm_tags *tags;
	u64 next;
	u64 type;
	u64 ctag;
};

int nvkm_vmm_map(struct nvkm_vmm *, struct nvkm_vma *, void *argv, u32 argc,
		 struct nvkm_vmm_map *);
void nvkm_vmm_unmap(struct nvkm_vmm *, struct nvkm_vma *);
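
/*
 * Illustrative mapping sketch, modelled on the in-tree memory backends;
 * the node/offset names are assumptions.  A backend describes its
 * backing storage in an nvkm_vmm_map (.mem for VRAM, or .sgl/.dma for
 * host memory) and passes the map arguments through to nvkm_vmm_map().
 *
 *	struct nvkm_vmm_map map = {
 *		.memory = memory,
 *		.offset = offset,
 *		.mem = node,
 *	};
 *	ret = nvkm_vmm_map(vmm, vma, argv, argc, &map);
 *	...
 *	nvkm_vmm_unmap(vmm, vma);
 */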

struct nvkm_memory *nvkm_umem_search(struct nvkm_client *, u64);
struct nvkm_vmm *nvkm_uvmm_search(struct nvkm_client *, u64 handle);
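
/*
 * Illustrative lookup sketch, not part of the original header.  The
 * in-tree implementations resolve a client object handle and return an
 * ERR_PTR-encoded pointer on failure (treat that as an assumption here).
 *
 *	struct nvkm_vmm *vmm = nvkm_uvmm_search(client, handle);
 *	if (IS_ERR(vmm))
 *		return PTR_ERR(vmm);
 */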

struct nvkm_mmu {
	const struct nvkm_mmu_func *func;
	struct nvkm_subdev subdev;

	u8  dma_bits;

	int heap_nr;
	struct {
#define NVKM_MEM_VRAM 0x01
#define NVKM_MEM_HOST 0x02
#define NVKM_MEM_COMP 0x04
#define NVKM_MEM_DISP 0x08
		u8  type;
		u64 size;
	} heap[4];

	int type_nr;
	struct {
#define NVKM_MEM_KIND 0x10
#define NVKM_MEM_MAPPABLE 0x20
#define NVKM_MEM_COHERENT 0x40
#define NVKM_MEM_UNCACHED 0x80
		u8 type;
		u8 heap;
	} type[16];

	struct nvkm_vmm *vmm;

	struct {
		struct mutex mutex;
		struct list_head list;
	} ptc, ptp;

	struct nvkm_device_oclass user;
};
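
/*
 * Illustrative sketch (hypothetical scan, not part of the original
 * header): the NVKM_MEM_* bits describe each entry of the type[] table
 * above, so a caller can search for, e.g., CPU-mappable VRAM.
 *
 *	for (i = 0; i < mmu->type_nr; i++) {
 *		u8 t = mmu->type[i].type;
 *		if ((t & NVKM_MEM_VRAM) && (t & NVKM_MEM_MAPPABLE))
 *			break;
 *	}
 */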

int nv04_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int nv41_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int nv44_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int nv50_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int g84_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int mcp77_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gf100_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gk104_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gk20a_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gm200_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gm20b_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gp100_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gp10b_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int gv100_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
int tu102_mmu_new(struct nvkm_device *, int, struct nvkm_mmu **);
#endif