/src/sys/external/bsd/drm2/dist/drm/nouveau/
nouveau_vmm.c
    37 nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);
    46 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
    54 nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm)
    59 if (vma->vmm == vmm)
    73 nvif_vmm_put(&vma->vmm->vmm, &tmp);
    82 nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm,
    [all...]

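Note: the hits at source lines 54 and 59 are the per-VMM lookup on a buffer object. A minimal sketch of that helper, reconstructed from the hits above (the list head names nvbo->vma_list and vma->head are not visible in the hits and are assumptions here):

    struct nouveau_vma *
    nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm)
    {
            struct nouveau_vma *vma;

            /* Each nouveau_bo keeps a list of its mappings; return the
             * one owned by the requested VMM, if any. */
            list_for_each_entry(vma, &nvbo->vma_list, head) {
                    if (vma->vmm == vmm)
                            return vma;
            }
            return NULL;
    }
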
nouveau_vmm.h
    5 #include <nvif/vmm.h>
    10 struct nouveau_vmm *vmm;    member in struct:nouveau_vma
    29 struct nvif_vmm vmm;    member in struct:nouveau_vmm

/src/sys/external/bsd/drm2/dist/drm/nouveau/nvif/
nouveau_nvif_vmm.c
    27 #include <nvif/vmm.h>
    33 nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr)
    35 return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,
    41 nvif_vmm_map(struct nvif_vmm *vmm, u64 addr, u64 size, void *argv, u32 argc,
    62 ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_MAP,
    70 nvif_vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma)
    73 WARN_ON(nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PUT,
    82 nvif_vmm_get(struct nvif_vmm *vmm, enum nvif_vmm_get type, bool sparse,
    103 ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_GET,
    113 nvif_vmm_fini(struct nvif_vmm *vmm)
    [all...]

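Note: source lines 33-35, 62, 73 and 103 show the shape of the nvif layer: each wrapper packs its arguments and forwards them to the kernel-side VMM object as a versioned method via nvif_object_mthd(). A sketch of the unmap wrapper along those lines (the nvif_vmm_unmap_v0 argument struct is inferred from the NVIF_VMM_V0_UNMAP method named in the hit, not shown in it):

    int
    nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr)
    {
            /* Pack the virtual address and issue the UNMAP method on the
             * VMM object; the kernel side tears down the PTEs. */
            return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,
                                    &(struct nvif_vmm_unmap_v0) { .addr = addr },
                                    sizeof(struct nvif_vmm_unmap_v0));
    }
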
/src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/subdev/mmu/
nouveau_nvkm_subdev_mmu_vmmnv44.c
    27 #include "vmm.h"
    32 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    44 u32 addr = (list ? *list++ : vmm->null) >> 12;
    71 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);
    72 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);
    73 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);
    74 VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000);
    78 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    87 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);
    95 VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27)
    222 struct nvkm_vmm *vmm;    local in function:nv44_vmm_new
    [all...]

nouveau_nvkm_subdev_mmu_vmm.c
    28 #include "vmm.h"
    82 struct nvkm_vmm *vmm;    member in struct:nvkm_vmm_iter
    120 VMM_TRACE(_it->vmm, "%s "f, _buf, ##a); \
    136 if (it->vmm->func->flush) {
    138 it->vmm->func->flush(it->vmm, it->flush);
    152 struct nvkm_vmm *vmm = it->vmm;    local in function:nvkm_vmm_unref_pdes
    166 func->sparse(vmm, pgd->pt[0], pdei, 1);
    169 func->unmap(vmm, pgd->pt[0], pdei, 1)
    210 struct nvkm_vmm *vmm = it->vmm;    local in function:nvkm_vmm_unref_sptes
    309 struct nvkm_vmm *vmm = it->vmm;    local in function:nvkm_vmm_ref_sptes
    426 struct nvkm_vmm *vmm = it->vmm;    local in function:nvkm_vmm_ref_hwpt
    1983 struct nvkm_vmm *vmm = container_of(kref, typeof(*vmm), kref);    local in function:nvkm_vmm_del
    1991 struct nvkm_vmm *vmm = *pvmm;    local in function:nvkm_vmm_unref
    2012 struct nvkm_vmm *vmm = NULL;    local in function:nvkm_vmm_new
    [all...]

nouveau_nvkm_subdev_mmu_vmmnv04.c
    27 #include "vmm.h"
    33 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    38 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);
    45 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    48 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
    53 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    59 VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);
    62 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
    67 nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm,
    70 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes)
    135 struct nvkm_vmm *vmm;    local in function:nv04_vmm_new
    [all...]

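Note: source lines 33, 38 and 59 give away the NV04 page-table layout: 32-bit PTEs starting 8 bytes into the table, with the low bits marking the page mapped. A sketch of the PTE write loop implied by those hits (the 0x1000 per-page stride and the meaning of the low bits are assumptions, not visible in the hits):

    static inline void
    nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
                     u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
    {
            u32 data = addr | 0x00000003; /* low bits: page valid/writable (assumed) */

            /* One 32-bit entry per page, after the 8-byte table header. */
            while (ptes--) {
                    VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);
                    data += 0x00001000;
            }
    }
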
nouveau_nvkm_subdev_mmu_vmmnv41.c
    27 #include "vmm.h"
    32 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    37 VMM_WO032(pt, vmm, ptei++ * 4, data);
    44 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    47 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
    52 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    59 VMM_WO032(pt, vmm, ptei++ * 4, data);
    63 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
    68 nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm,
    71 VMM_FO032(pt, vmm, ptei * 4, 0, ptes)
    [all...]

nouveau_nvkm_subdev_mmu_uvmm.c
    47 return nvkm_uvmm(object)->vmm;
    57 struct nvkm_vmm *vmm = uvmm->vmm;    local in function:nvkm_uvmm_mthd_pfnclr
    71 mutex_lock(&vmm->mutex);
    72 ret = nvkm_vmm_pfn_unmap(vmm, addr, size);
    73 mutex_unlock(&vmm->mutex);
    86 struct nvkm_vmm *vmm = uvmm->vmm;    local in function:nvkm_uvmm_mthd_pfnmap
    105 mutex_lock(&vmm->mutex);
    106 ret = nvkm_vmm_pfn_map(vmm, page, addr, size, phys)
    120 struct nvkm_vmm *vmm = uvmm->vmm;    local in function:nvkm_uvmm_mthd_unmap
    164 struct nvkm_vmm *vmm = uvmm->vmm;    local in function:nvkm_uvmm_mthd_map
    242 struct nvkm_vmm *vmm = uvmm->vmm;    local in function:nvkm_uvmm_mthd_put
    280 struct nvkm_vmm *vmm = uvmm->vmm;    local in function:nvkm_uvmm_mthd_get
    [all...]

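Note: source lines 71-73 and 105-106 show how the userspace-facing methods serialize address-space changes: take vmm->mutex, call the matching nvkm_vmm_* operation, release the mutex. A fragment of that pattern for the pfn-unmap method (argument unpacking and error handling omitted; uvmm, addr and size are assumed to be in scope):

            struct nvkm_vmm *vmm = uvmm->vmm;
            int ret;

            /* All mutations of the address space go through the per-VMM mutex. */
            mutex_lock(&vmm->mutex);
            ret = nvkm_vmm_pfn_unmap(vmm, addr, size);
            mutex_unlock(&vmm->mutex);
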
nouveau_nvkm_subdev_mmu_vmmgf100.c
    27 #include "vmm.h"
    39 gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    51 VMM_WO064(pt, vmm, ptei++ * 8, data);
    58 VMM_WO064(pt, vmm, ptei++ * 8, data);
    66 gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    69 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
    74 gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    78 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
    82 VMM_WO064(pt, vmm, ptei++ * 8, data);
    89 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte)
    [all...]

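Note: source lines 51, 58 and 82 show the Fermi-era PTE writes: entries are 8 bytes, so the table offset is ptei * 8. A sketch of the direct DMA write loop suggested by the hits, as it would sit inside gf100_vmm_pgt_dma()-style code with pt, vmm, ptei, ptes and map in scope (the data encoding from map->dma/map->type and the ctag increment are assumptions, not visible in the hits):

            while (ptes--) {
                    /* Shift the DMA address into the PTE field and OR in the
                     * page attributes carried in map->type (assumed encoding). */
                    const u64 data = (*map->dma++ >> 8) | map->type;
                    VMM_WO064(pt, vmm, ptei++ * 8, data);
                    map->type += map->ctag;
            }
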
nouveau_nvkm_subdev_mmu_vmmgp100.c
    27 #include "vmm.h"
    39 gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm,
    42 struct device *dev = vmm->mmu->subdev.device->dev;
    65 gp100_vmm_pfn_clear(struct nvkm_vmm *vmm,
    75 VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0));
    85 gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    88 struct device *dev = vmm->mmu->subdev.device->dev;
    117 VMM_WO064(pt, vmm, ptei++ * 8, data);
    124 gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    132 VMM_WO064(pt, vmm, ptei++ * 8, data)
    [all...]

nouveau_nvkm_subdev_mmu_vmmnv50.c
    27 #include "vmm.h"
    37 nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    58 VMM_WO064(pt, vmm, ptei++ * 8, data);
    64 nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    67 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
    72 nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
    76 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
    80 VMM_WO064(pt, vmm, ptei++ * 8, data);
    87 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
    91 nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt
    [all...]

nouveau_nvkm_subdev_mmu_nv04.c
    30 #include "vmm.h"
    39 .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv04_vmm_new, true },

nouveau_nvkm_subdev_mmu_vmmtu102.c
    27 #include "vmm.h"
    32 tu102_vmm_flush(struct nvkm_vmm *vmm, int depth)
    34 struct nvkm_subdev *subdev = &vmm->mmu->subdev;
    39 if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
    44 nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8);

uvmm.h
    7 #include "vmm.h"
    11 struct nvkm_vmm *vmm;    member in struct:nvkm_uvmm

nouveau_nvkm_subdev_mmu_vmmgm200.c
    27 #include "vmm.h"
    33 gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm,
    37 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);
    60 gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm,
    64 VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes);
    103 gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base)
    105 if (vmm->func->page[1].shift == 16)
    107 return gf100_vmm_join_(vmm, inst, base);
    111 gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
    113 return gm200_vmm_join_(vmm, inst, 0)
    [all...]

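Note: source lines 33-37 and 60-64 show how GM200 marks sparse ranges: whole runs of PTEs (and big-page PDEs) are filled with only the volatile bit set on otherwise-invalid entries, which the MMU treats as sparse rather than faulting. The PTE-level helper, as it appears in the hits:

    static void
    gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm,
                         struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
    {
            /* Fill the run with invalid PTEs that carry only the VOL bit,
             * marking the range sparse. */
            VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);
    }
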
/src/sys/external/bsd/drm2/dist/drm/ast/
ast_ttm.c
    43 struct drm_vram_mm *vmm;    local in function:ast_mm_init
    47 vmm = drm_vram_helper_alloc_mm(
    50 if (IS_ERR(vmm)) {
    51 ret = PTR_ERR(vmm);

/src/sys/external/bsd/drm2/dist/drm/nouveau/include/nvif/
cl506e.h
    11 __u64 vmm;    member in struct:nv50_channel_dma_v0

cl506f.h
    14 __u64 vmm;    member in struct:nv50_channel_gpfifo_v0

cl826e.h
    11 __u64 vmm;    member in struct:g82_channel_dma_v0

cl826f.h
    14 __u64 vmm;    member in struct:g82_channel_gpfifo_v0

cl906f.h
    13 __u64 vmm;    member in struct:fermi_channel_gpfifo_v0

cla06f.h
    14 __u64 vmm;    member in struct:kepler_channel_gpfifo_a_v0

clc36f.h
    14 __u64 vmm;    member in struct:volta_channel_gpfifo_a_v0

/src/sys/external/bsd/drm2/dist/drm/vboxvideo/
vbox_ttm.c
    20 struct drm_vram_mm *vmm;    local in function:vbox_mm_init
    24 vmm = drm_vram_helper_alloc_mm(dev, pci_resource_start(dev->pdev, 0),
    26 if (IS_ERR(vmm)) {
    27 ret = PTR_ERR(vmm);

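Note: vbox_ttm.c here and ast_ttm.c above follow the same VRAM-helper pattern: hand the PCI BAR that backs VRAM to drm_vram_helper_alloc_mm() and turn a possible error pointer into an errno. A hedged sketch of that pattern, assuming a struct drm_device *dev is already in scope (the available_vram_size field and the error message are illustrative, and the helper's exact signature differs between kernel versions):

            struct drm_vram_mm *vmm;
            int ret;

            /* Manage BAR 0 of the device as VRAM via the shared helper. */
            vmm = drm_vram_helper_alloc_mm(dev, pci_resource_start(dev->pdev, 0),
                                           vbox->available_vram_size);
            if (IS_ERR(vmm)) {
                    ret = PTR_ERR(vmm);
                    DRM_ERROR("Error initializing VRAM MM; %d\n", ret);
                    return ret;
            }
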
/src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/subdev/bar/
nouveau_nvkm_subdev_bar_gf100.c
    39 return gf100_bar(base)->bar[1].vmm;
    68 return gf100_bar(base)->bar[0].vmm;
    108 (bar_nr == 3) ? "bar2" : "bar1", &bar_vm->vmm);
    112 atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]);
    113 bar_vm->vmm->debug = bar->base.subdev.debug;
    119 ret = nvkm_vmm_boot(bar_vm->vmm);
    124 return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst);
    158 nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst);
    159 nvkm_vmm_unref(&bar->bar[1].vmm);
    162 nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst)
    [all...]