/*	$NetBSD: lock.h,v 1.13 2017/09/17 00:01:08 christos Exp $	*/

/*-
 * Copyright (c) 2000, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */

#ifndef _POWERPC_LOCK_H_
#define _POWERPC_LOCK_H_

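/*
 * Predicates for inspecting the lock word, plus raw set/clear helpers
 * that store to it directly, without atomic operations or barriers.
 */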
static __inline int
__SIMPLELOCK_LOCKED_P(const __cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_LOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(const __cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_LOCKED;
}

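/*
 * Initialize the lock to the unlocked state; the "sync" ensures the
 * store has been performed before any subsequent lock operations.
 */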
static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{
	*alp = __SIMPLELOCK_UNLOCKED;
	__asm volatile ("sync");
}

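/*
 * Acquire the lock.  lwarx/stwcx. form a load-reserve/store-conditional
 * pair: the lock word is loaded with a reservation and, if it reads
 * __SIMPLELOCK_UNLOCKED, __SIMPLELOCK_LOCKED is stored conditionally.
 * While the lock is held we spin on a plain lwzx load to avoid
 * generating reservation traffic.  The trailing isync is the acquire
 * barrier: it keeps accesses in the critical section from being
 * performed before the lock is observed to be held.
 */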
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	int old;

	__asm volatile ("	\
				\n\
1:	lwarx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	beq+	3f		\n\
2:	lwzx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	beq+	1b		\n\
	b	2b		\n\
3:	stwcx.	%3,0,%1		\n\
	bne-	1b		\n\
	isync			\n\
				\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED)
	: "memory");
}

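/*
 * Try to acquire the lock, returning immediately if it is already
 * held.  On the failure path the stwcx. to the local "dummy" word
 * clears the outstanding reservation.  Returns non-zero on success.
 */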
static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	int old, dummy;

	__asm volatile ("	\
				\n\
1:	lwarx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	bne	2f		\n\
	stwcx.	%3,0,%1		\n\
	bne-	1b		\n\
2:	stwcx.	%3,0,%4		\n\
	isync			\n\
				\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED),
	  "r"(&dummy)
	: "memory");

	return (old == __SIMPLELOCK_UNLOCKED);
}

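/*
 * Release the lock.  The "sync" acts as a release barrier: stores made
 * inside the critical section are performed before the lock word is
 * cleared.
 */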
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	__asm volatile ("sync");
	*alp = __SIMPLELOCK_UNLOCKED;
}

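/*
 * Memory barrier primitives: mb_read() uses "isync" as a lighter-weight
 * read barrier, while mb_write() and mb_memory() use "sync", a full
 * barrier.
 */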
static __inline void
mb_read(void)
{
	__asm volatile ("isync" ::: "memory");
}

static __inline void
mb_write(void)
{
	__asm volatile ("sync" ::: "memory");
}

static __inline void
mb_memory(void)
{
	__asm volatile ("sync" ::: "memory");
}

#endif /* _POWERPC_LOCK_H_ */