/src/sys/arch/mips/mips/
cache_tx39.c:
    70: tx3900_icache_sync_range_16(register_t va, vsize_t size)
    72: vaddr_t eva = round_line(va + size);
    74: va = trunc_line(va);
    76: if ((eva - va) >= mips_cache_info.mci_picache_size) {
    78: va = MIPS_PHYS_TO_KSEG0(0);
    82: tx3900_icache_do_inv_index_16(va, eva);
    96: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: tx3900_pdcache_wbinv_all_4]
    97: vaddr_t eva = va + mips_cache_info.mci_pdcache_size;
   106: while (va < eva)
   190: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: tx3920_pdcache_wbinv_all_16wt]
   208: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: tx3920_pdcache_wbinv_all_16wb]
   [all...]

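The tx39 hits above show the shape shared by most of the MIPS range-sync hooks in this listing: line-align the range, and if it spans at least the whole primary I-cache, invalidate the entire cache by index starting at KSEG0 instead of walking the range. A minimal, self-contained sketch of that pattern follows; every my_* name, the 32-byte line size, and the callback-style per-line op are placeholders, not NetBSD interfaces.

typedef unsigned long my_vaddr_t;
typedef unsigned long my_vsize_t;

#define MY_LINE			32UL
#define MY_KSEG0(pa)		((my_vaddr_t)(pa) | 0x80000000UL)	/* cached, unmapped KSEG0 */
#define my_round_line(x)	(((x) + MY_LINE - 1) & ~(MY_LINE - 1))
#define my_trunc_line(x)	((x) & ~(MY_LINE - 1))

static void
my_icache_sync_range(my_vaddr_t va, my_vsize_t size, my_vsize_t icache_size,
    void (*inv_line)(my_vaddr_t))
{
	my_vaddr_t eva = my_round_line(va + size);

	va = my_trunc_line(va);

	if ((eva - va) >= icache_size) {
		/* Range covers the whole I-cache: invalidate it by index. */
		va = MY_KSEG0(0);
		eva = va + icache_size;
	}

	for (; va < eva; va += MY_LINE)
		(*inv_line)(va);	/* one cache op per line */
}
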
cache_r10k.c:
    86: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: r10k_icache_sync_all]
    87: vaddr_t eva = va + mci->mci_picache_way_size;
    93: while (va < eva) {
    94: cache_op_r4k_line(va, CACHE_R4K_I|CACHEOP_R4K_INDEX_INV);
    95: va++;
    96: cache_op_r4k_line(va, CACHE_R4K_I|CACHEOP_R4K_INDEX_INV);
    97: va += 63;
   102: r10k_icache_sync_range(register_t va, vsize_t size)
   104: vaddr_t eva = round_line(va + size);
   106: va = trunc_line(va)
   162: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: r10k_pdcache_wbinv_all]
   248: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: r10k_sdcache_wbinv_all]
   [all...]

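The loop at lines 93-97 issues two index-invalidates per 64-byte line; from the va++ / va += 63 stepping it appears the low VA bit selects the way of the 2-way R10000-class I-cache, so one pass over a way-sized span covers both ways. A sketch of that loop, assuming the cache_op_r4k_line() macro and constants from <mips/cache_r4k.h>; the function name is hypothetical.

static void
my_r10k_style_icache_inv_all(vsize_t way_size)
{
	vaddr_t va = MIPS_PHYS_TO_KSEG0(0);
	const vaddr_t eva = va + way_size;

	while (va < eva) {
		/* Index-invalidate both ways of this 64-byte line. */
		cache_op_r4k_line(va, CACHE_R4K_I|CACHEOP_R4K_INDEX_INV);
		va++;				/* assumption: low bit selects the way */
		cache_op_r4k_line(va, CACHE_R4K_I|CACHEOP_R4K_INDEX_INV);
		va += 63;			/* on to the next line */
	}
}
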
cache_ls2.c:
    56: ls2_icache_sync_range(register_t va, vsize_t size)
    59: const vaddr_t eva = round_line(va + size);
    61: va = trunc_line(va);
    63: if (va + mci->mci_picache_size <= eva) {
    68: for (; va + 8 * 32 <= eva; va += 8 * 32) {
    69: cache_op_ls2_8line(va, CACHEOP_LS2_D_HIT_WB_INV);
    70: cache_op_ls2_8line(va, CACHEOP_LS2_I_INDEX_INV);
    73: for (; va < eva; va += 32)
   [all...]

cache_r5k.c:
   106: r5k_picache_sync_range(register_t va, vsize_t size)
   109: mips_intern_dcache_sync_range(va, size);
   110: mips_intern_icache_sync_range(va, size);
   114: r5k_picache_sync_range_index(vaddr_t va, vsize_t size)
   129: va = MIPS_PHYS_TO_KSEG0(va & way_mask);
   131: eva = round_line(va + size, line_size);
   132: va = trunc_line(va, line_size);
   133: size = eva - va;
   [all...]

cache_r3k.c:
    61: vaddr_t va = MIPS_PHYS_TO_KSEG0(0);    [local in function: r3k_icache_sync_all]
    62: vaddr_t eva = va + mips_cache_info.mci_picache_size;
    64: r3k_picache_do_inv(va, eva);
    68: r3k_icache_sync_range(register_t va, vsize_t size)
    70: vaddr_t eva = round_line(va + size);
    72: va = trunc_line(va);
    74: if ((eva - va) >= mips_cache_info.mci_picache_size) {
    79: r3k_picache_do_inv(va, eva);
    85: vaddr_t va = MIPS_PHYS_TO_KSEG0(0)    [local in function: r3k_pdcache_wbinv_all]
   [all...]

cache_r4k.c:
    67: r4k_icache_sync_range_generic(register_t va, vsize_t size)
    69: mips_dcache_wb_range(va, size);
    71: mips_intern_icache_sync_range_index(va, size);
    75: r4k_icache_sync_range_index_generic(vaddr_t va, vsize_t size)
    77: mips_dcache_wbinv_range_index(va, size);
    85: va = MIPS_PHYS_TO_KSEG0(va & mips_cache_info.mci_picache_way_mask);
    88: mips_intern_icache_sync_range_index(va, size);

cache_mipsNN.c:
   101: mipsNN_picache_sync_range(register_t va, vsize_t size)
   104: mips_intern_dcache_sync_range(va, size);
   105: mips_intern_icache_sync_range(va, size);
   109: mipsNN_picache_sync_range_index(vaddr_t va, vsize_t size)
   124: va = MIPS_PHYS_TO_KSEG0(va & way_mask);
   126: eva = round_line(va + size, line_size);
   127: va = trunc_line(va, line_size);
   128: size = eva - va;
   [all...]

/src/sys/arch/powerpc/booke/
booke_cache.c:
    47: dcbf(vaddr_t va, vsize_t off)
    49: __asm volatile("dcbf\t%0,%1" : : "b" (va), "r" (off));
    53: dcbst(vaddr_t va, vsize_t off)
    55: __asm volatile("dcbst\t%0,%1" : : "b" (va), "r" (off));
    59: dcbi(vaddr_t va, vsize_t off)
    61: __asm volatile("dcbi\t%0,%1" : : "b" (va), "r" (off));
    65: dcbz(vaddr_t va, vsize_t off)
    67: __asm volatile("dcbz\t%0,%1" : : "b" (va), "r" (off));
    71: dcba(vaddr_t va, vsize_t off)
    73: __asm volatile("dcba\t%0,%1" : : "b" (va), "r" (off))
   [all...]

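Each wrapper above emits one PowerPC cache-block instruction on a base+offset address pair. A caller typically walks a range in cache-block steps and then issues a barrier; here is a hedged sketch of such a writeback loop, assuming the kernel's vaddr_t/vsize_t types, a 32-byte cache block, and that Book E's msync is the right closing barrier (none of which is taken from booke_cache.c itself).

static inline void
my_dcbst(vaddr_t va, vsize_t off)
{
	__asm volatile("dcbst\t%0,%1" : : "b" (va), "r" (off));
}

static void
my_dcache_wb_range(vaddr_t va, vsize_t len)
{
	const vsize_t blk = 32;			/* assumed cache block size */

	for (vsize_t off = 0; off < len; off += blk)
		my_dcbst(va, off);		/* push each block to memory */
	__asm volatile("msync");		/* order the writebacks */
}
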
/src/sys/arch/cobalt/stand/boot/
cache.c:
    52: pdcache_inv(uint32_t va, u_int size)
    56: eva = round_line(va + size);
    57: va = trunc_line(va);
    59: while (va < eva) {
    60: cache_op_r4k_line(va, CACHE_R4K_D|CACHEOP_R4K_HIT_INV);
    61: va += CACHELINESIZE;
    66: pdcache_wb(uint32_t va, u_int size)
    70: eva = round_line(va + size);
    71: va = trunc_line(va)
   [all...]

/src/sys/arch/vax/include/
pte.h:
    75: #define kvtopte(va) kvtopte0((vaddr_t) (va))
    77: kvtopte0(vaddr_t va)
    85: : "g"(va), "o"(*Sysmap));
    91: : "g"(va));
    96: #define kvtophys(va) kvtophys0((vaddr_t) (va))
    98: kvtophys0(vaddr_t va)
   107: : "g"(va), "o"(*Sysmap) : "cc");
   114: : "g"(va) : "cc")
   [all...]

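The kvtopte0()/kvtophys0() hits above are VAX inline assembly; below is a hedged C-level sketch of what they compute. The 0x3fffffff system-space mask, VAX_PGSHIFT/VAX_PGOFSET (512-byte hardware pages), the struct pte field name pg_pfn, and the Sysmap indexing are all assumptions about the VAX layout, not quotes from pte.h.

/* Assumes the kernel's <machine/pte.h> and <machine/param.h>. */
static inline struct pte *
my_kvtopte(vaddr_t va)
{
	/* Index the system page table by virtual page number. */
	return &Sysmap[(va & 0x3fffffff) >> VAX_PGSHIFT];
}

static inline paddr_t
my_kvtophys(vaddr_t va)
{
	struct pte *pte = my_kvtopte(va);

	return ((paddr_t)pte->pg_pfn << VAX_PGSHIFT) | (va & VAX_PGOFSET);
}
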
/src/lib/libc/sys/
vadvise.c:
    35: int vadvise(int va);
    38: vadvise(int va)
    40: __USE(va);

/src/sys/arch/sparc/stand/common/
dvma.c:
    51: #define getsegmap(va) (lduha(va, ASI_SEGMAP))
    52: #define setsegmap(va, pmeg) do stha(va, ASI_SEGMAP, pmeg); while(0)
    84: int va = (int)addr;    [local in function: dvma_mapin]
    86: va -= base_va;
    90: if (va < 0 || va >= DVMA_MAPLEN)
    91: panic("dvma_mapin: va %x (DMA base %x)", va+base_va, base_va)
   105: int va = (int)addr;    [local in function: dvma_mapout]
   [all...]

/src/sys/rump/librump/rumpvfs/
rumpvfs_compat50.c:
    86: rump_vattr50_to_vattr(const struct vattr *_va50, struct vattr *va)
    90: va->va_type = va50->va_type;
    91: va->va_mode = va50->va_mode;
    92: va->va_nlink = va50->va_nlink;
    93: va->va_uid = va50->va_uid;
    94: va->va_gid = va50->va_gid;
    95: va->va_fsid = (long)va50->va_fsid;
    96: va->va_fileid = va50->va_fileid;
    97: va->va_size = va50->va_size;
    98: va->va_blocksize = va50->va_blocksize
   [all...]

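A plain memcpy() cannot convert the old attribute structure because, among other layout differences, the pre-6.0 timestamps used a 32-bit time_t, so the fields are copied and widened one by one as the hits above show. A self-contained illustration of the timestamp part, with made-up my_* types standing in for the real compat structures, which may differ:

#include <stdint.h>
#include <time.h>

struct my_timespec50 {
	int32_t	tv_sec;			/* old 32-bit time_t */
	long	tv_nsec;
};

static void
my_timespec50_to_timespec(const struct my_timespec50 *ts50, struct timespec *ts)
{
	ts->tv_sec = ts50->tv_sec;	/* widens to the current time_t */
	ts->tv_nsec = ts50->tv_nsec;
}
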
/src/sys/arch/sh3/sh3/
cache_sh4.c:
   204: cache_sh4_op_line_32(vaddr_t va, vaddr_t base, uint32_t mask, uint32_t bits)
   208: cca = base | (va & mask);
   218: cache_sh4_op_8lines_32(vaddr_t va, vaddr_t base, uint32_t mask, uint32_t bits)
   221: (base | (va & mask));
   236: vaddr_t va = 0;    [local in function: sh4_icache_sync_all]
   243: while (va < eva) {
   244: cache_sh4_op_8lines_32(va, SH4_CCIA, CCIA_ENTRY_MASK, CCIA_V);
   245: va += 32 * 8;
   252: sh4_icache_sync_range(vaddr_t va, vsize_t sz)
   255: vaddr_t eva = round_line(va + sz)
   297: vaddr_t va = 0;    [local in function: sh4_dcache_wbinv_all]
   432: vaddr_t va = 0;    [local in function: sh4_emode_icache_sync_all]
   474: vaddr_t va = 0;    [local in function: sh4_emode_dcache_wbinv_all]
   [all...]

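The SH4 hits operate on the cache through its memory-mapped address array: the entry-selecting bits of the VA are OR'd onto an array base such as SH4_CCIA, and control bits such as CCIA_V are cleared there. A hedged sketch of a single-line op in that style; that the operation is a read-modify-write clearing the given bits is inferred from the surrounding code, not quoted from cache_sh4.c.

static inline void
my_sh4_cache_op_line(vaddr_t va, vaddr_t base, uint32_t mask, uint32_t bits)
{
	/* Address of this line's entry in the cache address array. */
	volatile uint32_t *cca = (volatile uint32_t *)(base | (va & mask));

	*cca &= ~bits;		/* e.g. clear the valid bit (CCIA_V) */
}
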
cache_sh3.c:
   142: cache_sh3_op_line_16_nway(int n, vaddr_t va, uint32_t bits)
   148: va &= sh_cache_entry_mask;
   152: cca = (SH3_CCA | way << sh_cache_way_shift | va);
   164: cache_sh3_op_8lines_16_nway(int n, vaddr_t va, uint32_t bits)
   170: va &= sh_cache_entry_mask;
   175: (SH3_CCA | way << sh_cache_way_shift | va);
   190: vaddr_t va;    [local in function: sh3_cache_wbinv_all]
   192: for (va = 0; va < sh_cache_way_size; va += 16 * 8
   [all...]

/src/sys/arch/hpcmips/vr/
ite8181_vrip.c:
    65: struct vrip_attach_args *va = aux;    [local in function: ite8181_vrip_probe]
    69: if (va->va_addr == VRIPIFCF_ADDR_DEFAULT)
    77: if (bus_space_map(va->va_iot, va->va_addr, va->va_size, 0, &ioh)) {
    81: res = ite8181_probe(va->va_iot, ioh);
    82: bus_space_unmap(va->va_iot, ioh, va->va_size);
    93: struct vrip_attach_args *va = aux;    [local in function: ite8181_vrip_attach]
    96: sc->sc_baseaddr = va->va_addr
   [all...]

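Lines 77-82 are the usual NetBSD probe idiom: temporarily map the candidate register window, ask the chip-level probe whether the hardware answers, and unmap before returning. A generic sketch assuming <sys/bus.h>; my_probe_mapped() and the chip_probe callback are placeholders for the vrip/ite8181 specifics.

static int
my_probe_mapped(bus_space_tag_t iot, bus_addr_t addr, bus_size_t size,
    int (*chip_probe)(bus_space_tag_t, bus_space_handle_t))
{
	bus_space_handle_t ioh;
	int res;

	if (bus_space_map(iot, addr, size, 0, &ioh) != 0)
		return 0;			/* cannot even map it: no match */
	res = (*chip_probe)(iot, ioh);		/* e.g. ite8181_probe() */
	bus_space_unmap(iot, ioh, size);	/* always release the mapping */
	return res;
}
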
/src/sys/arch/ia64/stand/efi/libefi/
copy.c:
    41: efi_copyin(void *src, vaddr_t va, size_t len)
    44: memcpy((void *)efimd_va2pa(va), src, len);
    49: efi_copyout(vaddr_t va, void *dst, size_t len)
    52: memcpy(dst, (void *)efimd_va2pa(va), len);
    57: efi_readin(int fd, vaddr_t va, size_t len)
    60: return (read(fd, (void *)efimd_va2pa(va), len));

/src/sys/arch/atari/vme/
vme.c:
    82: struct vme_attach_args *va = aux;    [local in function: vmeprint]
    84: if (va->va_iosize)
    85: aprint_normal(" port 0x%x", va->va_iobase);
    86: if (va->va_iosize > 1)
    87: aprint_normal("-0x%x", va->va_iobase + va->va_iosize - 1);
    88: if (va->va_msize)
    89: aprint_normal(" iomem 0x%x", va->va_maddr);
    90: if (va->va_msize > 1)
    91: aprint_normal("-0x%x", va->va_maddr + va->va_msize - 1)
   101: struct vme_attach_args va;    [local in function: vmesearch]
   [all...]

/src/sys/arch/mips/include/
cache_ls2.h:
    48: * The way is encoded in the bottom 2 bits of VA.
    51: #define cache_op_ls2_8line_4way(va, op) \
    72: : "r" (va), "i" (op) \
    75: #define cache_op_ls2_line_4way(va, op) \
    82: : "r" (va), "i" (op) \
    85: #define cache_op_ls2_8line(va, op) \
    94: : "r" (va), "i" (op) \
    97: #define cache_op_ls2_line(va, op) \
   103: : "r" (va), "i" (op) \

/src/sys/arch/sun3/sun3/
control.c:
    53: get_pte(vaddr_t va)
    55: return (get_control_word(CONTROL_ADDR_BUILD(PGMAP_BASE, va)));
    59: set_pte(vaddr_t va, u_int pte)
    61: set_control_word(CONTROL_ADDR_BUILD(PGMAP_BASE, va), pte);
    65: get_segmap(vaddr_t va)
    67: return (get_control_byte(CONTROL_ADDR_BUILD(SEGMAP_BASE, va)));
    71: set_segmap(vaddr_t va, int sme)
    73: set_control_byte(CONTROL_ADDR_BUILD(SEGMAP_BASE, va), sme);

cache.c:
    66: char *va, *endva;    [local in function: cache_flush_page]
    79: va = (char *) pgva;
    84: __asm volatile ("movsl %0, %1@" : : "d" (data), "a" (va));
    85: va += VAC_FLUSH_INCR;
    86: } while (va < endva);
    95: char *va, *endva;    [local in function: cache_flush_segment]
   108: va = (char *) sgva;
   113: __asm volatile ("movsl %0, %1@" : : "d" (data), "a" (va));
   114: va += VAC_FLUSH_INCR;
   115: } while (va < endva)
   124: char *va, *endva;    [local in function: cache_flush_context]
   150: char *va, *endva;    [local in function: cache_clear_tags]
   [all...]

/src/sys/arch/sun68k/stand/libsa/
sun3.c:
   105: u_int i, pa, pte, pgva, va;    [local in function: dev3_mapin]
   122: va = pgva = sun3_devmap;
   130: va += (physaddr & PGOFSET);
   134: printf("dev3_mapin: va=0x%x pte=0x%x\n",
   135: va, sun3_get_pte(va));
   137: return ((char*)va);
   183: int va = (int)addr;    [local in function: dvma3_mapin]
   186: if ((va < SA_MIN_VA) || (va >= SA_MAX_VA)
   199: int va = (int)addr;    [local in function: dvma3_mapout]
   [all...]

/src/sys/arch/evbppc/evbppc/
evbppc_machdep.c:
   107: vaddr_t taddr, va;    [local in function: mapiodev]
   127: va = taddr = uvm_km_alloc(kernel_map, len, 0, UVM_KMF_VAONLY);
   129: if (va == 0)
   139: return (void *)(va + off);
   143: unmapiodev(vaddr_t va, vsize_t sz)
   146: if (va < VM_MIN_KERNEL_ADDRESS || va > VM_MAX_KERNEL_ADDRESS)
   149: sz = round_page((va & PAGE_MASK) + sz);
   150: va = trunc_page(va);
   [all...]

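The rounding at lines 149-150 converts a possibly unaligned (va, sz) pair back into whole pages before they are released. A small standalone worked example of that arithmetic, assuming 4 KiB pages and using made-up my_* macros in place of the kernel's round_page()/trunc_page(); the addresses are illustrative only.

#include <assert.h>
#include <stdint.h>

#define MY_PAGE_SIZE	0x1000u			/* assumed 4 KiB pages */
#define MY_PAGE_MASK	(MY_PAGE_SIZE - 1)
#define my_round_page(x) (((x) + MY_PAGE_MASK) & ~(uintptr_t)MY_PAGE_MASK)
#define my_trunc_page(x) ((x) & ~(uintptr_t)MY_PAGE_MASK)

int
main(void)
{
	uintptr_t va = 0xe0000fe0;		/* made-up mapping address */
	uintptr_t sz = 0x30;			/* made-up mapping length */

	/* Same steps as unmapiodev() lines 149-150. */
	sz = my_round_page((va & MY_PAGE_MASK) + sz);
	va = my_trunc_page(va);

	assert(sz == 0x2000);	/* the 0x30 bytes straddle two pages */
	assert(va == 0xe0000000);
	return 0;
}
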
/src/sys/arch/sh3/include/
mmu.h:
    84: #define sh_tlb_invalidate_addr(a, va) (*__sh_tlb_invalidate_addr)(a, va)
    87: #define sh_tlb_update(a, va, pte) (*__sh_tlb_update)(a, va, pte)
    92: #define sh_tlb_invalidate_addr(a, va) sh3_tlb_invalidate_addr(a, va)
    95: #define sh_tlb_update(a, va, pte) sh3_tlb_update(a, va, pte)
   100: #define sh_tlb_invalidate_addr(a, va) sh4_tlb_invalidate_addr(a, va)
   [all...]

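The three groups of definitions above suggest the dispatch pattern: indirect calls through function pointers when both SH3 and SH4 support are configured, direct calls when only one is. The surrounding #if structure is not visible in the hits, so the sketch below uses illustrative option and symbol names rather than the header's own.

#if defined(MY_SH3) && defined(MY_SH4)
/* Dual-CPU kernel: resolved at boot through a function pointer. */
extern void (*__my_tlb_invalidate_addr)(int asid, vaddr_t va);
#define my_tlb_invalidate_addr(a, va)	(*__my_tlb_invalidate_addr)(a, va)
#elif defined(MY_SH3)
#define my_tlb_invalidate_addr(a, va)	sh3_tlb_invalidate_addr(a, va)
#elif defined(MY_SH4)
#define my_tlb_invalidate_addr(a, va)	sh4_tlb_invalidate_addr(a, va)
#endif
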
/src/sys/rump/kern/lib/libsljit/arch/aarch64/
cpufunc.c:
    44: aarch64_icache_sync_range(vaddr_t va, vsize_t sz)
    48: // (void)rumpcomp_sync_icache((void *)va, (uint64_t)sz);
    50: __builtin___clear_cache((void *)va, (char *)va + sz);

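The hook above backs sljit's instruction-cache maintenance in a rump (userland) build, where the kernel's MD cache routines are not available, so it leans on the compiler builtin instead. A usage sketch for any JIT-style caller; my_flush_jit_code() is a made-up name.

#include <stddef.h>

static void
my_flush_jit_code(void *code, size_t len)
{
	/*
	 * Make freshly written instructions visible to the I-cache
	 * before jumping to them.
	 */
	__builtin___clear_cache((char *)code, (char *)code + len);
}
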