/src/sys/arch/arm/arm32/
arm32_kvminit.c
     333  pv_addr_t **pvp = &SLIST_FIRST(&bmi->bmi_chunks);    (local in function: add_pages)
     334  while ((*pvp) != NULL && (*pvp)->pv_va <= pv->pv_va) {
     335          pv_addr_t * const pv0 = (*pvp);
     354  KASSERT(pv->pv_va != (*pvp)->pv_va);
     355  pvp = &SLIST_NEXT(*pvp, pv_list);
     357  KASSERT((*pvp) == NULL || pv->pv_va < (*pvp)->pv_va);
     362  SLIST_NEXT(new_pv, pv_list) = *pvp;
     [all...]
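
The add_pages() hits above are the classic pointer-to-pointer cursor insert into a sorted SLIST: rather than carrying a separate "previous" node, pvp always points at the link field that will be rewritten, so the head, middle, and tail cases need no special-casing. A minimal userland sketch of the same idiom, with made-up node/insert_sorted names standing in for the kernel's pv_addr_t machinery:

#include <sys/queue.h>
#include <stdio.h>
#include <stdlib.h>

struct node {
        unsigned long va;               /* stand-in for pv_va */
        SLIST_ENTRY(node) link;         /* stand-in for pv_list */
};
SLIST_HEAD(node_head, node);

/*
 * Insert 'new' keeping the list sorted by va.  The cursor np points at
 * the link field under examination (&SLIST_FIRST at first, then each
 * &SLIST_NEXT), so the splice below works anywhere in the list.
 */
static void
insert_sorted(struct node_head *h, struct node *new)
{
        struct node **np = &SLIST_FIRST(h);

        while (*np != NULL && (*np)->va <= new->va)
                np = &SLIST_NEXT(*np, link);

        SLIST_NEXT(new, link) = *np;
        *np = new;
}

int
main(void)
{
        struct node_head h = SLIST_HEAD_INITIALIZER(h);
        unsigned long vals[] = { 30, 10, 20 };
        struct node *n;

        for (size_t i = 0; i < 3; i++) {
                n = malloc(sizeof(*n));
                n->va = vals[i];
                insert_sorted(&h, n);
        }
        SLIST_FOREACH(n, &h, link)
                printf("%lu\n", n->va); /* 10 20 30 */
        return 0;
}

The final two statements are the same splice the arm32 snippet performs with SLIST_NEXT(new_pv, pv_list) = *pvp followed by a store through the cursor.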
pmap.c
     990  struct pv_entry **pvp;    (local in function: pmap_enter_pv)
     996  pvp = &SLIST_FIRST(&md->pvh_list);
    1003  while (*pvp != NULL && PV_IS_KENTRY_P((*pvp)->pv_flags))
    1004          pvp = &SLIST_NEXT(*pvp, pv_link);
    1007  while (*pvp != NULL && PV_IS_WRITE_P((*pvp)->pv_flags))
    1008          pvp = &SLIST_NEXT(*pvp, pv_link)
    2861  struct pv_entry **pvp = &SLIST_FIRST(&md->pvh_list);    (local in function: pmap_page_remove)
    8106  pv_addr_t *pv, **pvp;    (local in function: pmap_boot_pagealloc)
     [all...]
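
pmap_enter_pv() uses the same cursor, but to keep the pv list grouped: it first steps past the PV_IS_KENTRY_P() entries and then past the PV_IS_WRITE_P() ones before splicing the new entry in. A hedged sketch of that grouping, assuming kernel-enter entries sort first and writable entries before read-only ones; the F_* flags and pv_enter() are illustrative names, not the pmap's:

#include <stdio.h>

#define F_KENTRY        0x1u
#define F_WRITE         0x2u

struct pv {                             /* illustrative, not struct pv_entry */
        unsigned        flags;
        struct pv       *next;
};

/*
 * Keep the list grouped: kernel-enter mappings first, writable
 * mappings next, read-only mappings last.  The cursor walks past each
 * higher-priority group before the new entry is spliced in, mirroring
 * the two skip loops in pmap_enter_pv().
 */
static void
pv_enter(struct pv **pvp, struct pv *new)
{
        if ((new->flags & F_KENTRY) == 0) {
                while (*pvp != NULL && ((*pvp)->flags & F_KENTRY) != 0)
                        pvp = &(*pvp)->next;
                if ((new->flags & F_WRITE) == 0) {
                        while (*pvp != NULL && ((*pvp)->flags & F_WRITE) != 0)
                                pvp = &(*pvp)->next;
                }
        }
        new->next = *pvp;
        *pvp = new;
}

int
main(void)
{
        struct pv k = { F_KENTRY, NULL };
        struct pv w = { F_WRITE, NULL };
        struct pv r = { 0, NULL };
        struct pv *head = NULL, *p;

        pv_enter(&head, &r);
        pv_enter(&head, &w);
        pv_enter(&head, &k);
        for (p = head; p != NULL; p = p->next)
                printf("%#x\n", p->flags);      /* 0x1, 0x2, 0 */
        return 0;
}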
/src/usr.sbin/npf/npfctl/
npf_data.c
     335  npfvar_t *pvp = npfvar_create();    (local in function: npfctl_parse_port_range_variable)
     347  npfvar_add_elements(pvp, npfctl_parse_port_range(p, p));
     351  npfvar_add_element(pvp, NPFVAR_PORT_RANGE, pr,
     356  npfvar_add_elements(pvp, npfctl_parse_port_range(p, p));
     366  npfvar_destroy(pvp);
     370  return pvp
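On the npfctl side, npfctl_parse_port_range_variable() accumulates parsed port ranges into a freshly created npfvar_t and calls npfvar_destroy() on the partially built variable the moment anything fails to parse, returning it only when every element was accepted. A rough stand-alone sketch of that create/append/destroy-on-error shape; the portvar_* helpers are made up for illustration and are not the real npfvar API:

#include <stdio.h>
#include <stdlib.h>

/* Illustrative stand-ins for npfvar_create()/npfvar_add_element()/
 * npfvar_destroy(); the real npfctl interfaces differ. */
struct portvar {
        size_t  nports;
        int     port[64];
};

static struct portvar *
portvar_create(void)
{
        return calloc(1, sizeof(struct portvar));
}

static void
portvar_destroy(struct portvar *pv)
{
        free(pv);
}

static int
portvar_add(struct portvar *pv, int port)
{
        if (port < 0 || port > 65535 || pv->nports >= 64)
                return -1;
        pv->port[pv->nports++] = port;
        return 0;
}

/*
 * Same shape as npfctl_parse_port_range_variable(): create the
 * variable, append each parsed element, and destroy the whole thing
 * the moment one element is rejected, so the caller only ever sees a
 * complete variable or NULL.
 */
static struct portvar *
parse_ports(const int *ports, size_t n)
{
        struct portvar *pv = portvar_create();

        if (pv == NULL)
                return NULL;
        for (size_t i = 0; i < n; i++) {
                if (portvar_add(pv, ports[i]) == -1) {
                        portvar_destroy(pv);
                        return NULL;
                }
        }
        return pv;
}

int
main(void)
{
        const int good[] = { 22, 80, 443 };
        struct portvar *pv = parse_ports(good, 3);

        printf("%zu ports\n", pv != NULL ? pv->nports : 0);
        portvar_destroy(pv);
        return 0;
}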
/src/sys/arch/hppa/hppa/
pmap.c
     551  struct pv_entry *pve, *npve, **pvp;    (local in function: pmap_resolve_alias)
     555  pvp = &md->pvh_list;
     625  *pvp = pve;
     626  pvp = &pve->pv_next;
     631  *pvp = npve;
     652  *pvp = pve;
     653  pvp = &pve->pv_next;
     657  *pvp = NULL;
    1613  struct pv_entry *pve, *npve, **pvp;    (local in function: pmap_page_remove)
    1622  pvp = &md->pvh_list
     [all...]
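
The hppa hits rebuild the pv chain in place: each entry that survives is stored back through *pvp and the cursor advances to its pv_next, entries being discarded are simply skipped, and *pvp = NULL terminates whatever was kept. A small sketch of that tail-cursor pruning, with an illustrative struct pv and a made-up keep flag in place of the real alias bookkeeping:

#include <stdio.h>
#include <stdlib.h>

struct pv {                             /* illustrative, not struct pv_entry */
        int              keep;
        struct pv       *pv_next;
};

/*
 * Rebuild the list in place.  Entries we keep are written back through
 * the tail cursor pvp, which then advances to their pv_next; entries
 * we drop are freed and skipped; *pvp = NULL terminates the rebuilt
 * chain.  This is the shape of the hppa pvp loops above.
 */
static void
prune(struct pv **pvp)
{
        struct pv *pve, *npve;

        for (pve = *pvp; pve != NULL; pve = npve) {
                npve = pve->pv_next;
                if (pve->keep) {
                        *pvp = pve;
                        pvp = &pve->pv_next;
                } else {
                        free(pve);
                }
        }
        *pvp = NULL;
}

int
main(void)
{
        struct pv *head = NULL, **pvp = &head, *p;
        int n = 0;

        for (int i = 0; i < 5; i++) {
                p = calloc(1, sizeof(*p));
                p->keep = (i % 2) == 0;
                *pvp = p;
                pvp = &p->pv_next;
        }
        prune(&head);
        for (p = head; p != NULL; p = p->pv_next)
                n++;
        printf("%d kept\n", n);                 /* 3 kept */
        return 0;
}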
/src/sys/uvm/pmap/
pmap.c
     930  pv_entry_t pvp = NULL;    (local in function: pmap_page_remove)
     946   * pvp is non-null when we already have a PV_KENTER
     951  if (pvp) {
     956          pvp->pv_next = pv;
     962  pvp = pv;
    1002  if (pvp) {
    1003          KASSERT(pvp->pv_pmap == pmap_kernel());
    1004          KASSERT(pvp->pv_next == NULL);
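In the MI uvm/pmap code pvp is not a cursor but a remembered entry: pmap_page_remove() frees ordinary mappings, keeps the PV_KENTER ones, chains each kept entry after the previously kept one, and finally asserts that the survivor belongs to pmap_kernel() with no successor. A sketch of that keep-one-class removal, using an illustrative kenter flag instead of the real PV_KENTER bookkeeping:

#include <stdio.h>
#include <stdlib.h>

struct pv {                             /* illustrative, not uvm/pmap's pv_entry */
        int              kenter;        /* stand-in for the PV_KENTER flag */
        struct pv       *pv_next;
};

/*
 * Free every ordinary entry, keep the kenter ones.  pvp remembers the
 * most recently kept entry so the next one can be chained after it,
 * just as pmap_page_remove() preserves PV_KENTER mappings while the
 * rest of the list is torn down.
 */
static struct pv *
page_remove(struct pv *head)
{
        struct pv *pv, *next, *newhead = NULL, *pvp = NULL;

        for (pv = head; pv != NULL; pv = next) {
                next = pv->pv_next;
                if (!pv->kenter) {
                        free(pv);
                        continue;
                }
                pv->pv_next = NULL;
                if (pvp == NULL)
                        newhead = pv;
                else
                        pvp->pv_next = pv;
                pvp = pv;
        }
        return newhead;
}

int
main(void)
{
        struct pv *head = NULL, *p;

        for (int i = 0; i < 4; i++) {
                p = calloc(1, sizeof(*p));
                p->kenter = (i == 2);
                p->pv_next = head;
                head = p;
        }
        head = page_remove(head);
        printf("%s\n", head != NULL && head->kenter &&
            head->pv_next == NULL ? "one kenter entry kept" : "unexpected");
        return 0;
}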
/src/sys/arch/alpha/alpha/
pmap.c
    3347  pv_entry_t pv, *pvp;    (local in function: pmap_pv_remove)
    3360  for (pvp = (struct pv_entry **)&md->pvh_listx, pv = VM_MDPAGE_PVS(pg);
    3361       pv != NULL; pvp = &pv->pv_next, pv = *pvp)
    3373  *pvp = (pv_entry_t)((uintptr_t)pv->pv_next |
    3374      (((uintptr_t)*pvp) & PGA_ATTRS));
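The alpha pmap keeps the page attribute bits (PGA_ATTRS) in the low bits of the pvh_listx head word, so unlinking a pv entry must OR those bits back into whatever gets written through *pvp; interior pv_next links carry no bits, so the same expression is safe everywhere along the list. A sketch of that tagged-pointer unlink, assuming a two-bit attribute mask; ATTRS, pv_first() and pv_remove() are made-up names, and the uintptr_t * view of the links mirrors the kernel's cast of &md->pvh_listx:

#include <stdint.h>
#include <stdio.h>

#define ATTRS   0x3UL                   /* stand-in for PGA_ATTRS */

struct pv {
        struct pv *pv_next;
};

/* Strip the attribute bits to get the first real entry. */
static struct pv *
pv_first(uintptr_t listx)
{
        return (struct pv *)(listx & ~ATTRS);
}

/*
 * Unlink 'victim'.  The head word carries attribute bits in its low
 * bits, so whatever is stored back through *pvp must OR them in again;
 * interior pv_next words have no bits set, so the expression is the
 * same in both cases.
 */
static void
pv_remove(uintptr_t *listx, struct pv *victim)
{
        uintptr_t *pvp = listx;
        struct pv *pv;

        for (pv = pv_first(*pvp); pv != NULL;
            pvp = (uintptr_t *)&pv->pv_next, pv = pv->pv_next) {
                if (pv == victim) {
                        *pvp = (uintptr_t)pv->pv_next | (*pvp & ATTRS);
                        return;
                }
        }
}

int
main(void)
{
        struct pv a = { NULL }, b = { &a };
        uintptr_t head = (uintptr_t)&b | 0x1;   /* list b -> a, one attr bit set */

        pv_remove(&head, &b);
        printf("attrs %#lx, first %s\n", (unsigned long)(head & ATTRS),
            pv_first(head) == &a ? "a" : "?");
        return 0;
}

Because every entry is pointer-aligned, the low bits of the head word are always free to hold the attributes, which is what makes the trick safe.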
/src/sys/arch/x86/x86/
pmap.c
    2054  struct pv_page *pvp = (struct pv_page *)obj;    (local in function: pmap_pvp_ctor)
    2061  LIST_INIT(&pvp->pvp_pves);
    2062  pvp->pvp_nfree = PVE_PER_PVP;
    2063  pvp->pvp_pmap = NULL;
    2066  LIST_INSERT_HEAD(&pvp->pvp_pves, pve, pve_list);
    2078  struct pv_page *pvp __diagused = obj;
    2080  KASSERT(pvp->pvp_pmap == NULL);
    2081  KASSERT(pvp->pvp_nfree == PVE_PER_PVP);
    2091  struct pv_page *pvp;    (local in function: pmap_alloc_pv)
    2095  if (__predict_false((pvp = LIST_FIRST(&pmap->pm_pvp_part)) == NULL))
    2134  struct pv_page *pvp = (struct pv_page *)trunc_page((vaddr_t)pve);    (local in function: pmap_free_pv)
    2160  struct pv_page *pvp;    (local in function: pmap_drain_pv)
     [all...]
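
The x86 pmap allocates pv entries from page-sized struct pv_page objects: the pool cache constructor threads all PVE_PER_PVP entries onto an embedded free list, allocation pulls from a per-pmap list of partially used pages, and pmap_free_pv() recovers an entry's owning pv_page simply by truncating the entry's address to a page boundary. A miniature userland sketch of that layout; PAGE_SIZE, pvp_create() and the other names here are illustrative, not the kernel's interfaces:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE       4096UL
#define PAGE_MASK       (PAGE_SIZE - 1)

struct pve {
        struct pve *next;
};

/* A page-aligned header followed by as many entries as fit in the
 * rest of the page, all threaded onto an embedded free list. */
struct pvp {
        struct pve *free;
        unsigned    nfree;
        struct pve  entries[];
};

#define PVE_PER_PVP \
        ((PAGE_SIZE - sizeof(struct pvp)) / sizeof(struct pve))

static struct pvp *
pvp_create(void)
{
        struct pvp *p = aligned_alloc(PAGE_SIZE, PAGE_SIZE);

        assert(p != NULL);
        p->free = NULL;
        p->nfree = PVE_PER_PVP;
        for (unsigned i = 0; i < PVE_PER_PVP; i++) {
                p->entries[i].next = p->free;
                p->free = &p->entries[i];
        }
        return p;
}

static struct pve *
pve_alloc(struct pvp *p)
{
        struct pve *e = p->free;

        if (e != NULL) {
                p->free = e->next;
                p->nfree--;
        }
        return e;
}

static void
pve_free(struct pve *e)
{
        /* recover the owning page header, as trunc_page() does */
        struct pvp *p = (struct pvp *)((uintptr_t)e & ~PAGE_MASK);

        e->next = p->free;
        p->free = e;
        p->nfree++;
}

int
main(void)
{
        struct pvp *p = pvp_create();
        struct pve *e = pve_alloc(p);

        printf("%u free after alloc\n", p->nfree);
        pve_free(e);
        printf("%u free after free\n", p->nfree);
        assert(p->nfree == PVE_PER_PVP);
        free(p);
        return 0;
}

Keeping the header and its entries in the same naturally aligned page is what lets the free path avoid any lookup: the owning page is implicit in the entry's address.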