    Searched defs:kva (Results 1 - 25 of 183) sorted by relevancy

  /src/sys/uvm/
uvm_io.c 58 vaddr_t baseva, endva, pageoffset, kva; local in function:uvm_io
104 error = uvm_map_extract(map, baseva, chunksz, kernel_map, &kva,
126 error = uiomove((void *) (kva + pageoffset), sz, uio);
135 uvm_unmap_remove(kernel_map, kva, kva + chunksz, &dead_entries,
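
Taken together, the uvm_io.c hits show the "borrow another map's pages through kernel_map" flow: uvm_map_extract() maps a chunk of the target map at a fresh kva, uiomove() copies through that temporary window at kva + pageoffset, and uvm_unmap_remove() drops the window again. A minimal sketch of one chunk, assuming map, baseva, chunksz, pageoffset, sz, uio and the extract flags are set up as in uvm_io() itself; the real function loops over chunks, and the final uvm_unmap_detach() of the dead entries is assumed rather than shown in the hits:

    vaddr_t kva;
    struct vm_map_entry *dead_entries;
    int error;

    /* Map chunksz bytes of the target map at baseva into kernel_map. */
    error = uvm_map_extract(map, baseva, chunksz, kernel_map, &kva,
        extractflags);
    if (error)
            return error;

    /* Copy to or from the caller's iovecs through the borrowed window. */
    error = uiomove((void *)(kva + pageoffset), sz, uio);

    /* Tear the temporary kernel mapping down again. */
    uvm_unmap_remove(kernel_map, kva, kva + chunksz, &dead_entries, 0);
    if (dead_entries != NULL)
            uvm_unmap_detach(dead_entries, 0);  /* not shown in the hits */
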
  /src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_ih.c 72 void *kva; local in function:amdgpu_ih_ring_init
87 &kva, BUS_DMA_WAITOK|BUS_DMA_COHERENT);
93 r = -bus_dmamap_load(adev->ddev->dmat, ih->ring_map, kva, size,
96 fail3: __unused bus_dmamem_unmap(adev->ddev->dmat, kva, size);
99 ih->ring = kva;
164 void *kva = __UNVOLATILE(ih->ring); local in function:amdgpu_ih_ring_fini
166 bus_dmamem_unmap(adev->ddev->dmat, kva, size);
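
These amdgpu_ih.c hits are the NetBSD bus_dma(9) glue around the interrupt-handler ring: the ring memory is mapped into kernel virtual space, the resulting kva is loaded into the DMA map (with the return value negated to follow the driver's Linux-style -errno convention) and stored as ih->ring, and bus_dmamem_unmap() releases it on the fail3 error path and again in amdgpu_ih_ring_fini(). Below is a hedged sketch of the standard alloc/map/create/load sequence those hits imply, written as a hypothetical helper with generic names; the real code uses adev->ddev->dmat as the tag and its own error labels:

    static int
    ring_dma_init(bus_dma_tag_t dmat, bus_size_t size,
        bus_dma_segment_t *seg, bus_dmamap_t *mapp, void **kvap)
    {
            int nsegs, error;

            /* Allocate DMA-safe memory for the ring. */
            error = bus_dmamem_alloc(dmat, size, PAGE_SIZE, 0, seg, 1,
                &nsegs, BUS_DMA_WAITOK);
            if (error)
                    goto fail0;

            /* Map it into kernel virtual space; *kvap is the CPU's view. */
            error = bus_dmamem_map(dmat, seg, nsegs, size, kvap,
                BUS_DMA_WAITOK | BUS_DMA_COHERENT);
            if (error)
                    goto fail1;

            /* Create a DMA map and load the kva so the device sees it too. */
            error = bus_dmamap_create(dmat, size, 1, size, 0, BUS_DMA_WAITOK,
                mapp);
            if (error)
                    goto fail2;
            error = bus_dmamap_load(dmat, *mapp, *kvap, size, NULL,
                BUS_DMA_WAITOK);
            if (error)
                    goto fail3;

            /* Caller keeps *kvap, as amdgpu_ih keeps ih->ring = kva. */
            return 0;

    fail3:  bus_dmamap_destroy(dmat, *mapp);
    fail2:  bus_dmamem_unmap(dmat, *kvap, size);
    fail1:  bus_dmamem_free(dmat, seg, nsegs);
    fail0:  return error;
    }
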
  /src/sys/arch/hp300/hp300/
bus_space.c 54 vaddr_t kva; local in function:bus_space_map
78 &kva);
85 physaccess((void *)kva, (void *)bpa, size, PG_RW|PG_CI);
90 *bshp = (bus_space_handle_t)(kva + offset);
119 vaddr_t kva; local in function:bus_space_unmap
134 kva = m68k_trunc_page(bsh);
147 physunaccess((void *)kva, size);
152 if (extent_free(extio_ex, kva, size,
154 printf("%s: kva 0x%lx size 0x%lx: "
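
In the hp300 bus_space.c hits, kva is kernel virtual space for device registers: bus_space_map() obtains a page-aligned kva (the extent_free() on extio_ex in the unmap path strongly suggests the "&kva" at line 78 is filled in by an extent_alloc() on that same extent), wires the physical address bpa to it with physaccess() using read/write, cache-inhibited PTEs, and returns kva plus the sub-page offset as the handle; bus_space_unmap() reverses the steps. A rough fragment of the two halves, with the extent_alloc() arguments, the EX_* flags and the tail of the diagnostic string assumed, since the hits truncate them:

    /* map: carve KVA out of the external-I/O extent ... */
    u_long va;
    error = extent_alloc(extio_ex, size, PAGE_SIZE, 0, EX_NOWAIT, &va);
    if (error)
            return error;
    kva = (vaddr_t)va;

    /* ... point it at the device pages, cache-inhibited ... */
    physaccess((void *)kva, (void *)bpa, size, PG_RW | PG_CI);

    /* ... and hand back a handle that keeps the sub-page offset. */
    *bshp = (bus_space_handle_t)(kva + offset);

    /* unmap: recover the page-aligned KVA from the handle ... */
    kva = m68k_trunc_page(bsh);

    /* ... tear the mapping down and give the KVA back to the extent. */
    physunaccess((void *)kva, size);
    if (extent_free(extio_ex, kva, size, EX_NOWAIT) != 0)
            printf("%s: kva 0x%lx size 0x%lx: can't free region\n",
                __func__, (u_long)kva, (u_long)size);
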
  /src/sys/arch/aarch64/aarch64/
efi_machdep.c 76 /* even if TBI is disabled, AARCH64_ADDRTOP_TAG means KVA */
77 bool kva = (va & AARCH64_ADDRTOP_TAG) != 0; local in function:cpu_efirt_map_range
78 if (kva) {
93 if (kva) {
102 if (kva)
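
The aarch64 efi_machdep.c hits are the odd one out: here kva is a boolean, not an address. cpu_efirt_map_range() decides whether a virtual address handed to it already lives in the kernel half of the address space by testing the top-of-VA tag bit, which is set for kernel addresses even when Top Byte Ignore is disabled. The shape of the test, with placeholder comments since the hits do not show what each branch does:

    /* Even with TBI off, the AARCH64_ADDRTOP_TAG bit marks a kernel VA. */
    bool kva = (va & AARCH64_ADDRTOP_TAG) != 0;

    if (kva) {
            /* kernel-VA path (body not shown in the hits) */
    } else {
            /* non-kernel-VA path (body not shown in the hits) */
    }
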
  /src/sys/arch/m68k/m68k/
vm_machdep.c 153 vaddr_t kva; /* Kernel VA (new to) */ local in function:vmapbuf
163 kva = uvm_km_alloc(phys_map, len, 0, UVM_KMF_VAONLY | UVM_KMF_WAITVA);
164 bp->b_data = (void *)(kva + off);
172 pmap_enter(kpmap, kva, pa, VM_PROT_READ | VM_PROT_WRITE,
175 pmap_kenter_pa(kva, pa, VM_PROT_READ | VM_PROT_WRITE, 0);
178 kva += PAGE_SIZE;
192 vaddr_t kva; local in function:vunmapbuf
198 kva = m68k_trunc_page(bp->b_data);
199 off = (vaddr_t)bp->b_data - kva;
203 pmap_remove(vm_map_pmap(phys_map), kva, kva + len)
    [all...]
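
The vm_machdep.c hits here and in the hppa, sparc, riscv and sparc64 entries below are all the same vmapbuf()/vunmapbuf() idiom for physio: allocate VA-only space from the port's phys_map (kernel_map on the sparcs), double-map each physical page of the user buffer at that kva, point bp->b_data at the new mapping, and undo everything after the I/O completes. A generic, hedged sketch of the pair follows; per-port details such as the pmap_enter() vs. pmap_kenter_pa() choice and the exact rounding helpers are glossed over, so treat it as the shape of the code rather than any one port's version:

    #include <sys/param.h>
    #include <sys/systm.h>
    #include <sys/buf.h>
    #include <sys/proc.h>
    #include <uvm/uvm_extern.h>

    int
    vmapbuf(struct buf *bp, vsize_t len)
    {
            struct pmap *upmap = vm_map_pmap(&bp->b_proc->p_vmspace->vm_map);
            vaddr_t uva, kva, off;
            paddr_t pa;

            uva = trunc_page((vaddr_t)bp->b_data);
            off = (vaddr_t)bp->b_data - uva;
            len = round_page(off + len);

            /* Grab kernel VA only; the pages belong to the user buffer. */
            kva = uvm_km_alloc(phys_map, len, 0,
                UVM_KMF_VAONLY | UVM_KMF_WAITVA);
            bp->b_saveaddr = bp->b_data;
            bp->b_data = (void *)(kva + off);

            /* Double-map each physical page of the user buffer at kva. */
            while (len > 0) {
                    if (!pmap_extract(upmap, uva, &pa))
                            panic("vmapbuf: null page frame");
                    pmap_kenter_pa(kva, pa, VM_PROT_READ | VM_PROT_WRITE, 0);
                    uva += PAGE_SIZE;
                    kva += PAGE_SIZE;
                    len -= PAGE_SIZE;
            }
            pmap_update(pmap_kernel());
            return 0;
    }

    void
    vunmapbuf(struct buf *bp, vsize_t len)
    {
            vaddr_t kva, off;

            kva = trunc_page((vaddr_t)bp->b_data);
            off = (vaddr_t)bp->b_data - kva;
            len = round_page(off + len);

            /* Drop the temporary mappings and the borrowed kernel VA. */
            pmap_kremove(kva, len);
            pmap_update(pmap_kernel());
            uvm_km_free(phys_map, kva, len, UVM_KMF_VAONLY);
            bp->b_data = bp->b_saveaddr;
            bp->b_saveaddr = NULL;
    }
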
  /src/sys/arch/hppa/hppa/
vm_machdep.c 208 vaddr_t uva, kva; local in function:vmapbuf
224 kva = uvm_km_alloc(phys_map, len, 0, UVM_KMF_VAONLY | UVM_KMF_WAITVA);
225 bp->b_data = (void *)(kva + off);
230 pmap_enter(kpmap, kva, pa,
233 kva += PAGE_SIZE;
247 vaddr_t kva; local in function:vunmapbuf
254 kva = trunc_page((vaddr_t)bp->b_data);
255 off = (vaddr_t)bp->b_data - kva;
258 pmap_remove(pmap, kva, kva + len)
    [all...]
  /src/sys/arch/sparc/sparc/
vm_machdep.c 85 vaddr_t kva; /* Kernel VA (new to) */ local in function:vmapbuf
100 kva = uvm_km_alloc(kernel_map, len, 0, UVM_KMF_VAONLY | UVM_KMF_WAITVA);
101 bp->b_data = (void *)(kva + off);
117 pmap_enter(kpmap, kva, pa,
120 kva += PAGE_SIZE;
134 vaddr_t kva; local in function:vunmapbuf
140 kva = trunc_page((vaddr_t)bp->b_data);
141 off = (vaddr_t)bp->b_data - kva;
143 pmap_remove(vm_map_pmap(kernel_map), kva, kva + len)
    [all...]
  /src/sys/arch/riscv/riscv/
vm_machdep.c 215 vaddr_t kva; /* Kernel VA (new to) */ local in function:vmapbuf
224 kva = uvm_km_alloc(phys_map, len, atop(uva) & uvmexp.colormask,
226 KASSERT((atop(kva ^ uva) & uvmexp.colormask) == 0);
228 bp->b_data = (void *)(kva + off);
234 pmap_kenter_pa(kva, pa, VM_PROT_READ | VM_PROT_WRITE,
237 kva += PAGE_SIZE;
251 vaddr_t kva; local in function:vunmapbuf
255 kva = trunc_page((vaddr_t)bp->b_data);
256 len = round_page((vaddr_t)bp->b_data - kva + len);
257 pmap_kremove(kva, len)
    [all...]
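
The riscv vmapbuf() differs from the generic sketch above in one detail: because UVM tracks virtual cache colors, the kernel VA is requested with the same color as the user VA, and the KASSERT then checks that kva and uva really do share a color. Just that piece, reconstructed from the hits (the UVM_KMF_COLORMATCH flag is assumed, since the continuation of the uvm_km_alloc() call is not shown):

    /* Ask for a kernel VA whose cache color matches the user VA. */
    kva = uvm_km_alloc(phys_map, len, atop(uva) & uvmexp.colormask,
        UVM_KMF_VAONLY | UVM_KMF_WAITVA | UVM_KMF_COLORMATCH);
    KASSERT((atop(kva ^ uva) & uvmexp.colormask) == 0);
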
  /src/sys/arch/sparc64/sparc64/
vm_machdep.c 84 vaddr_t kva; /* Kernel VA (new to) */ local in function:vmapbuf
95 kva = uvm_km_alloc(kernel_map, len, 0, UVM_KMF_VAONLY | UVM_KMF_WAITVA);
96 bp->b_data = (void *)(kva + off);
103 pmap_kenter_pa(kva, pa, VM_PROT_READ | VM_PROT_WRITE, 0);
106 kva += PAGE_SIZE;
120 vaddr_t kva; local in function:vunmapbuf
126 kva = trunc_page((vaddr_t)bp->b_data);
127 off = (vaddr_t)bp->b_data - kva;
129 pmap_kremove(kva, len);
130 uvm_km_free(kernel_map, kva, len, UVM_KMF_VAONLY)
    [all...]
