/*	$NetBSD: mutex.h,v 1.20 2023/07/12 12:50:13 riastradh Exp $	*/

/*-
 * Copyright (c) 2002, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _VAX_MUTEX_H_
#define	_VAX_MUTEX_H_

/*
 * The VAX mutex implementation is troublesome because the VAX lacks a
 * compare-and-swap operation.
 *
 * So we emulate compare-and-swap by raising the IPL to lock out
 * interrupts and, if MP, using the interlocked bit instructions
 * BBSSI/BBCCI (branch on bit set/clear and set/clear interlocked)
 * to lock out the other processors.
 */
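
/*
 * Illustrative sketch only (an assumption, not part of this header):
 * on a uniprocessor, the emulation described above amounts to roughly
 * the following, using splhigh()/splx() from <machine/intr.h>.  On MP,
 * BBSSI/BBCCI must be used as well, since raising the IPL only stops
 * the local CPU from intervening.
 */
#if 0
static inline bool
emulated_cas(volatile uintptr_t *p, uintptr_t old, uintptr_t new)
{
	int s = splhigh();		/* no interrupt can intervene */
	bool ok = (*p == old);

	if (ok)
		*p = new;		/* swap only if the compare succeeded */
	splx(s);			/* restore the previous IPL */
	return ok;
}
#endif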

#include <sys/types.h>

#ifndef __MUTEX_PRIVATE

struct kmutex {
	uintptr_t	mtx_pad1;	/* opaque to MI code */
};

#else	/* __MUTEX_PRIVATE */

#include <machine/intr.h>

struct kmutex {
	union {
		/* Adaptive mutex */
		volatile uintptr_t	mtxa_owner;	/* bytes 0-3 */

		/* Spin mutex */
		struct {
			volatile uint8_t	mtxs_dummy;
			ipl_cookie_t		mtxs_ipl;
			__cpu_simple_lock_t	mtxs_lock;
			volatile uint8_t	mtxs_unused;
		} s;
	} u;
};
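
/*
 * Illustrative check only (an assumption, not in this header): both
 * views of the union are meant to occupy the same 4-byte cell, which
 * could be asserted at compile time with __CTASSERT from <sys/cdefs.h>:
 */
#if 0
__CTASSERT(sizeof(struct kmutex) == sizeof(uintptr_t));
#endif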

/* Field names shared with the MI mutex code. */
#define	mtx_owner		u.mtxa_owner
#define	mtx_ipl			u.s.mtxs_ipl
#define	mtx_lock		u.s.mtxs_lock
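
/*
 * Illustrative sketch only (an assumption, not part of this header):
 * with the layout above, a spin mutex is taken roughly as follows,
 * raising the IPL recorded in the mutex before spinning on the
 * interlock (BBSSI on MP):
 */
#if 0
static inline int
spin_mutex_acquire(struct kmutex *mtx)
{
	int s;

	s = splraiseipl(mtx->mtx_ipl);		/* block interrupts first */
	__cpu_simple_lock(&mtx->mtx_lock);	/* then take the interlock */
	return s;				/* caller restores via splx(s) */
}
#endif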

#define	__HAVE_MUTEX_STUBS		1	/* MD fast-path stubs exist */
#define	__HAVE_SPIN_MUTEX_STUBS		1	/* ... for spin mutexes too */
#define	__HAVE_SIMPLE_MUTEXES		1	/* MI code uses CAS-based mutexes */

#endif	/* __MUTEX_PRIVATE */

#endif /* _VAX_MUTEX_H_ */
81