/*	$NetBSD: mutex.h,v 1.17 2022/04/06 22:47:57 riastradh Exp $	*/

/*-
 * Copyright (c) 2002, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _VAX_MUTEX_H_
#define	_VAX_MUTEX_H_

/*
 * The VAX mutex implementation is troublesome, because VAX lacks a
 * compare-and-swap operation.
 *
 * So we emulate compare-and-swap by raising IPL to lock out interrupts
 * and (if MP) using BBSSI/BBCCI to lock out the other processors.
 */
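
/*
 * Illustrative sketch (not part of the original header): one hedged way
 * such an emulation could look on a uniprocessor, blocking interrupts
 * around the read-modify-write with the spl(9) interface.  The MP case,
 * which additionally interlocks against other CPUs (e.g. with
 * BBSSI/BBCCI), is omitted.  The name _mutex_cas_sketch is hypothetical
 * and does not exist in the kernel.
 */
#if 0	/* example only, never compiled */
static inline int
_mutex_cas_sketch(volatile uintptr_t *ptr, uintptr_t expected, uintptr_t newval)
{
	int s, rv;

	s = splhigh();			/* block interrupts on this CPU */
	rv = (*ptr == expected);	/* compare... */
	if (rv)
		*ptr = newval;		/* ...and swap only if it matched */
	splx(s);			/* restore the previous IPL */
	return rv;
}
#endif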

#ifndef __MUTEX_PRIVATE

struct kmutex {
	uintptr_t	mtx_pad1;
};

#else	/* __MUTEX_PRIVATE */

struct kmutex {
	union {
		/* Adaptive mutex */
		volatile uintptr_t	mtxa_owner;	/* 0-3 */

		/* Spin mutex */
		struct {
			volatile uint8_t	mtxs_dummy;
			ipl_cookie_t		mtxs_ipl;
			__cpu_simple_lock_t	mtxs_lock;
			volatile uint8_t	mtxs_unused;
		} s;
	} u;
};

#define	mtx_owner		u.mtxa_owner
#define	mtx_ipl			u.s.mtxs_ipl
#define	mtx_lock		u.s.mtxs_lock

#define	__HAVE_MUTEX_STUBS		1
#define	__HAVE_SPIN_MUTEX_STUBS		1
#define	__HAVE_SIMPLE_MUTEXES		1

#endif	/* __MUTEX_PRIVATE */

#endif /* _VAX_MUTEX_H_ */