/src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/subdev/mmu/

nouveau_nvkm_subdev_mmu_vmmnv44.c
     33 dma_addr_t *list, u32 ptei, u32 ptes)
     35 u32 pteo = (ptei << 2) & ~0x0000000f;
     45 switch (ptei++ & 0x3) {
     79 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
     83 if (ptei & 3) {
     84 const u32 pten = min(ptes, 4 - (ptei & 3));
     87 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);
     88 ptei += pten;
     95 VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27);
     96 VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22)
    [all...]

nouveau_nvkm_subdev_mmu_vmmnv41.c
     33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
     37 VMM_WO032(pt, vmm, ptei++ * 4, data);
     45 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     47 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
     53 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     59 VMM_WO032(pt, vmm, ptei++ * 4, data);
     63 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
     69 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
     71 VMM_FO032(pt, vmm, ptei * 4, 0, ptes);

nouveau_nvkm_subdev_mmu_vmmnv04.c
     34 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
     38 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);
     46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     48 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
     54 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     59 VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);
     62 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
     68 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
     70 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);

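The nv04 and nv41 hits trace the same shape: a per-generation pgt_pte() helper writes one 32-bit PTE per page at a ptei * 4 byte offset, with nv04 additionally skipping an 8-byte page-table header (hence the "8 +"), while the _SGL/_DMA entry points either write dma_addr_t values directly or fall back to the VMM_MAP_ITER_* helpers from vmm.h. Below is a minimal sketch of the nv04 variant, reconstructed from the fragments above and assuming the surrounding nouveau vmm.h context (VMM_WO032, struct nvkm_vmm, struct nvkm_mmu_pt, struct nvkm_vmm_map); the 0x00001000 per-page stride and the bit-name comments are assumptions, not visible in the hits.

    /* Sketch only: reconstructed from the cross-reference fragments. */
    static inline void
    nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
                     u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
    {
            u32 data = addr | 0x00000003;           /* presumably VALID | RW. */
            while (ptes--) {
                    /* "8 +": PTE 0 starts at byte 8, past the table header. */
                    VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);
                    data += 0x00001000;             /* assumed 4 KiB page stride. */
            }
    }
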
nouveau_nvkm_subdev_mmu_vmmgp100.c
     40 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
     47 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0);
     48 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4);
     59 ptei++;
     66 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
     71 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0);
     72 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4);
     75 VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0));
     78 ptei++;
     86 u32 ptei, u32 ptes, struct nvkm_vmm_map *map
    [all...]

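The gp100 hits at lines 47-78 come from read-modify-write loops over 64-bit PTEs: each entry is read back as two 32-bit halves with nvkm_ro32(), recombined, and conditionally rewritten with its valid bit cleared. A hedged sketch of that loop follows; the function name matches the pfn_clear hook declared in vmm.h, but the exact flag bits tested are not visible in the hits, so only the valid-bit check is shown.

    /* Sketch only: loop body reconstructed from the visible fragments. */
    static bool
    gp100_vmm_pfn_clear(struct nvkm_vmm *vmm,
                        struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
    {
            bool dma = false;
            nvkm_kmap(pt->memory);
            while (ptes--) {
                    u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0);
                    u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4);
                    u64 data = (u64)datahi << 32 | datalo;
                    /* Assumed condition; the real test may check more bits. */
                    if (data & BIT_ULL(0)) {
                            /* Clear VALID, keep the rest of the PTE intact. */
                            VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0));
                            dma = true;
                    }
                    ptei++;
            }
            nvkm_done(pt->memory);
            return dma;
    }
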
nouveau_nvkm_subdev_mmu_vmmgk104.c
     31 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
     34 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes)

nouveau_nvkm_subdev_mmu_vmmgf100.c
     40 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
     51 VMM_WO064(pt, vmm, ptei++ * 8, data);
     58 VMM_WO064(pt, vmm, ptei++ * 8, data);
     67 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     69 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
     75 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     78 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
     82 VMM_WO064(pt, vmm, ptei++ * 8, data);
     89 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
     94 u32 ptei, u32 ptes, struct nvkm_vmm_map *map
    [all...]

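The gf100 DMA hits (lines 75-89) show a fast path plus a fallback: when the mapping's page size equals the CPU page size, each dma_addr_t becomes one 64-bit PTE written in a tight loop; otherwise the range is re-chunked through VMM_MAP_ITER_DMA. A sketch under that reading; the ">> 8" address packing and the map->type/map->ctag bookkeeping are assumptions consistent with, but not proven by, the fragments.

    /* Sketch only: fast path for CPU-page-sized mappings. */
    static void
    gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
                      u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
    {
            if (map->page->shift == PAGE_SHIFT) {
                    VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
                    nvkm_kmap(pt->memory);
                    while (ptes--) {
                            /* Assumed PTE packing: address bits | type bits. */
                            const u64 data = (*map->dma++ >> 8) | map->type;
                            VMM_WO064(pt, vmm, ptei++ * 8, data);
                            map->type += map->ctag;
                    }
                    nvkm_done(pt->memory);
                    return;
            }
            /* Page sizes differ: let the generic iterator chunk the range. */
            VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
    }
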
nouveau_nvkm_subdev_mmu_vmmnv50.c
     38 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
     49 if (ptes >= pten && IS_ALIGNED(ptei, pten))
     58 VMM_WO064(pt, vmm, ptei++ * 8, data);
     65 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     67 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
     73 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
     76 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
     80 VMM_WO064(pt, vmm, ptei++ * 8, data);
     87 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
     92 u32 ptei, u32 ptes, struct nvkm_vmm_map *map
    [all...]

nouveau_nvkm_subdev_mmu_vmm.c
    205 const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes)
    211 u32 spti = ptei & (sptn - 1), lpti, pteb;
    216 for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) {
    226 for (ptei = pteb = ptei >> sptb; ptei < lpti; pteb = ptei) {
    229 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++)
    430 u32 pteb, ptei, ptes; local in function:nvkm_vmm_ref_hwpt
    542 const u32 ptei = it.pte[0]; local in function:nvkm_vmm_iter
    [all...]

nouveau_nvkm_subdev_mmu_vmmgm200.c
     34 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
     37 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes)

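The gk104 and gm200 hits are both single-line range fills: VMM_FO064() stamps one constant 64-bit word across ptes consecutive entries starting at byte offset ptei * 8, with only the flag differing (BIT_ULL(1) PRIV on gk104, BIT_ULL(32) VOL on gm200). A sketch of the gm200 variant; the function name is an assumption, since only the signature tail and the fill line are visible above.

    /* Sketch only: fill `ptes` 64-bit PTEs with the VOL (sparse) bit set,
     * assuming the nouveau vmm.h context for VMM_FO064 and the types. */
    static void
    gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm,
                         struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
    {
            VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);
    }
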
vmm.h
     56 struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
     60 u32 ptei, u32 ptes, struct nvkm_vmm_map *);
     74 bool (*pfn_clear)(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
    290 #define VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL,BASE,SIZE,NEXT) do { \
    304 VMM_SPAM(VMM, "ITER %08x %08x PTE(s)", PTEI, (u32)_ptes); \
    306 FILL(VMM, PT, PTEI, _ptes, MAP, _addr); \
    307 PTEI += _ptes; \
    313 #define VMM_MAP_ITER_MEM(VMM,PT,PTEI,PTEN,MAP,FILL) \
    314 VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL, \
    318 #define VMM_MAP_ITER_DMA(VMM,PT,PTEI,PTEN,MAP,FILL)
    [all...]

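The vmm.h hits at lines 290-318 outline the shared mapping iterator: VMM_MAP_ITER() walks a PTE range, clamps each step to what the current memory segment covers, calls the backend's FILL callback, and advances PTEI/PTEN; VMM_MAP_ITER_MEM/_DMA/_SGL just plug in the segment accessors. A simplified sketch of the macro's shape, with the kmap/done bracketing dropped and the segment-advance details treated as assumptions beyond the visible fragments.

    /* Sketch only: shape of the iterator, per the fragments above. */
    #define VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL,BASE,SIZE,NEXT) do {        \
            while (PTEN) {                                                     \
                    /* PTEs left in the current segment vs. PTEs requested. */ \
                    u64 _ptes = ((SIZE) - MAP->off) >> MAP->page->shift;       \
                    u64 _addr = ((BASE) + MAP->off);                           \
                    if (_ptes > PTEN) {                                        \
                            MAP->off += PTEN << MAP->page->shift;              \
                            _ptes = PTEN;                                      \
                    } else {                                                   \
                            MAP->off = 0;                                      \
                            NEXT; /* advance to the next segment. */           \
                    }                                                          \
                    VMM_SPAM(VMM, "ITER %08x %08x PTE(s)", PTEI, (u32)_ptes);  \
                    FILL(VMM, PT, PTEI, _ptes, MAP, _addr);                    \
                    PTEI += _ptes;                                             \
                    PTEN -= _ptes;                                             \
            }                                                                  \
    } while(0)
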
/src/lib/libkvm/

kvm_hppa.c
    138 pte_pa = (pde & PG_FRAME) + (ptei(va) * sizeof(pt_entry_t));

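The single libkvm hit is the classic two-level page-table walk: mask the page-directory entry down to its frame address, then index the page table by the PTE index that ptei() extracts from the virtual address. A self-contained sketch of the arithmetic; PG_FRAME and ptei() are real names from the hit, but the shift and mask values below are illustrative, not hppa's actual ones.

    #include <stdint.h>

    typedef uint32_t pt_entry_t;

    /* Illustrative constants; the real definitions live in the hppa
     * pmap headers and may differ. */
    #define PG_FRAME        0xfffff000u     /* frame bits of a PDE/PTE  */
    #define PG_SHIFT        12              /* 4 KiB pages (assumed)    */
    #define PTE_MASK        0x3ffu          /* 1024 PTEs/table (assumed) */
    #define ptei(va)        (((va) >> PG_SHIFT) & PTE_MASK)

    /* Physical address of the PTE that maps va, given its PDE. */
    static uint32_t
    pte_paddr(uint32_t pde, uint32_t va)
    {
            return (pde & PG_FRAME) + ptei(va) * sizeof(pt_entry_t);
    }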