Lines Matching refs:spc_lwplock
297 KASSERT(lwp_locked(l, l->l_cpu->ci_schedstate.spc_lwplock));
322 KASSERT(lwp_locked(l, l->l_cpu->ci_schedstate.spc_lwplock));
558 * is currently held. Idle LWPs are always locked by spc_lwplock,
560 * in all cases newl is locked by spc_lwplock.
572 lwp_setlock(newl, spc->spc_lwplock);
665 KASSERT(lwp_locked(l, spc->spc_lwplock));
733 * Drop spc_lwplock, if the current LWP has been moved
736 mutex_spin_exit(spc->spc_lwplock);
908 KASSERT(lwp_locked(l, l->l_cpu->ci_schedstate.spc_lwplock));
917 KASSERT(lwp_locked(l, l->l_cpu->ci_schedstate.spc_lwplock));
1073 KASSERT(lwp_locked(l, spc->spc_lwplock));
1103 KASSERT(lwp_locked(l, spc->spc_lwplock));
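
The matches above all follow one pattern: an on-CPU LWP that is not sitting on a run queue is locked by its CPU's spc_lwplock, the scheduler asserts that with lwp_locked(), hands the newly selected LWP over to spc_lwplock with lwp_setlock() before the switch, and releases the mutex afterwards. The following is a minimal sketch of that pattern, not an excerpt from the file; example_switch_to() is a hypothetical function written only to illustrate the calls seen in the matches, and the cpu_switchto() step is elided.

/*
 * Hypothetical illustration only -- not kernel source.  Shows the
 * spc_lwplock hand-off pattern that the matches above come from.
 */
#include <sys/param.h>
#include <sys/systm.h>		/* KASSERT */
#include <sys/lwp.h>
#include <sys/cpu.h>
#include <sys/sched.h>		/* struct schedstate_percpu */
#include <sys/mutex.h>

static void
example_switch_to(struct lwp *l, struct lwp *newl)
{
	struct schedstate_percpu *spc = &l->l_cpu->ci_schedstate;

	/* The outgoing LWP must be locked by its CPU's spc_lwplock. */
	KASSERT(lwp_locked(l, spc->spc_lwplock));

	/*
	 * Re-lock the incoming LWP with spc_lwplock so that, once it
	 * is running, it is covered by the same per-CPU lock as any
	 * other on-CPU LWP (idle LWPs are always locked this way).
	 */
	lwp_setlock(newl, spc->spc_lwplock);

	/* ... the actual context switch would happen here ... */

	/*
	 * Drop spc_lwplock once the switch is complete; if the
	 * previous LWP has been migrated to another CPU it is no
	 * longer covered by this CPU's lock.
	 */
	mutex_spin_exit(spc->spc_lwplock);
}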