/src/sys/arch/vax/include/
macros.h
    338  struct lwp *prevlwp;                 local in function: cpu_switchto
    345      : "=g"(prevlwp)
    348  return prevlwp;
|
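The vax excerpt above shows the machine-dependent half of the contract: cpu_switchto() captures the lwp it is switching away from through a GCC extended-asm output constraint ("=g"(prevlwp)) and returns it to the caller. Below is a minimal sketch of that shape only; the actual vax switch instructions are not part of the listing, so the asm template body is a deliberately empty placeholder, the function name is hypothetical, and the include set is approximate.

#include <sys/param.h>
#include <sys/lwp.h>

static inline struct lwp *
cpu_switchto_shape(struct lwp *oldlwp, struct lwp *newlwp, bool returning)
{
	struct lwp *prevlwp;		/* macros.h:338 -- local in cpu_switchto */

	/*
	 * The real code performs the register/stack switch here and leaves
	 * the outgoing lwp where the "=g" operand picks it up; that
	 * instruction sequence is elided in the listing and omitted here.
	 */
	__asm volatile(
		""			/* placeholder for the MD switch code */
		: "=g"(prevlwp)		/* macros.h:345 */
		:			/* inputs elided */
		: "memory");

	return prevlwp;			/* macros.h:348 */
}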
|
/src/sys/kern/
kern_synch.c
    728  struct lwp *prevlwp;                 local in function: mi_switch
    811  prevlwp = cpu_switchto(l, newl, returning);
    816  KASSERTMSG(l == curlwp, "l %p curlwp %p prevlwp %p",
    817      l, curlwp, prevlwp);
    818  KASSERT(prevlwp != NULL);
    833  KASSERT((prevlwp->l_pflag & LP_RUNNING) != 0);
    834  lock = prevlwp->l_mutex;
    835  if (__predict_false(prevlwp->l_stat == LSZOMB)) {
    836      atomic_store_release(&prevlwp->l_pflag,
    837          prevlwp->l_pflag & ~LP_RUNNING)
    [all...]
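On the machine-independent side, the kern_synch.c excerpt shows what mi_switch() does with that return value once it regains the CPU: sanity-check that we are back on the expected lwp, note the previous lwp's lock, and, if that lwp has already exited, clear LP_RUNNING with release ordering. The following condensed sketch reflects only the lines visible above; the parts hidden behind "[all...]" are not reconstructed, the function name is hypothetical, and the include list is approximate for a self-contained kernel-context illustration.

#include <sys/param.h>
#include <sys/systm.h>		/* kernel assertions */
#include <sys/cpu.h>		/* MD/MI glue: cpu_switchto(), curlwp */
#include <sys/lwp.h>		/* struct lwp, LP_RUNNING, LSZOMB */
#include <sys/mutex.h>		/* kmutex_t */
#include <sys/atomic.h>		/* atomic_store_release */

static void
mi_switch_tail_sketch(struct lwp *l, struct lwp *newl, bool returning)
{
	struct lwp *prevlwp;	/* kern_synch.c:728 */
	kmutex_t *lock;

	/* 811: the MD switch hands back the lwp we just switched away from. */
	prevlwp = cpu_switchto(l, newl, returning);

	/* 816-818: we are running on l's context again; sanity checks. */
	KASSERTMSG(l == curlwp, "l %p curlwp %p prevlwp %p",
	    l, curlwp, prevlwp);
	KASSERT(prevlwp != NULL);

	/* 833-834: the outgoing lwp must still be marked running; remember
	 * its lock before it can be reclaimed. */
	KASSERT((prevlwp->l_pflag & LP_RUNNING) != 0);
	lock = prevlwp->l_mutex;

	/* 835-837: if it already exited, drop LP_RUNNING with release
	 * ordering so whoever reaps the zombie sees a consistent lwp. */
	if (__predict_false(prevlwp->l_stat == LSZOMB)) {
		atomic_store_release(&prevlwp->l_pflag,
		    prevlwp->l_pflag & ~LP_RUNNING);
	}

	(void)lock;		/* subsequent use is elided in the listing */
}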