/* $NetBSD: lock.h,v 1.6 2000/05/02 04:41:06 thorpej Exp $ */

/*-
 * Copyright (c) 1998, 1999 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
 * NASA Ames Research Center.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by the NetBSD
 *	Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations (Alpha).
 *
 * The lock word is a plain int: 0 == unlocked, 1 == locked (see the
 * __SIMPLELOCK_* constants below).  Acquisition is built on the Alpha
 * load-locked / store-conditional (ldl_l / stl_c) primitives; every
 * successful acquire and every release issues an "mb" (memory barrier)
 * so that accesses inside the critical section do not leak past the
 * lock operations.
 */

#ifndef _ALPHA_LOCK_H_
#define _ALPHA_LOCK_H_

/* Spin lock word; __volatile so compilers never cache it in a register. */
typedef __volatile int __cpu_simple_lock_t;

#define	__SIMPLELOCK_LOCKED	1
#define	__SIMPLELOCK_UNLOCKED	0

static __inline void __cpu_simple_lock_init __P((__cpu_simple_lock_t *))
	__attribute__((__unused__));
static __inline void __cpu_simple_lock __P((__cpu_simple_lock_t *))
	__attribute__((__unused__));
static __inline int __cpu_simple_lock_try __P((__cpu_simple_lock_t *))
	__attribute__((__unused__));
static __inline void __cpu_simple_unlock __P((__cpu_simple_lock_t *))
	__attribute__((__unused__));

/*
 * Initialize a lock to the unlocked state.
 *
 * "stl $31, %0" stores register $31 (the Alpha zero register, always 0)
 * to the lock word, i.e. __SIMPLELOCK_UNLOCKED; the trailing "mb" makes
 * the initialization visible before any subsequent access.
 */
static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{

	__asm __volatile(
		"# BEGIN __cpu_simple_lock_init\n"
		"	stl	$31, %0		\n"
		"	mb			\n"
		"	# END __cpu_simple_lock_init"
		: "=m" (*alp));
}

/*
 * Acquire a lock, spinning until it becomes available.
 *
 * 1:  ldl_l loads the lock word with the lock flag set; if it is
 *     nonzero (held), branch to the spin loop at 2.
 * 2:  spin with ordinary ldl until the word reads zero, then retry
 *     the locked sequence at 1.
 * 3:  stl_c failed (lock flag lost, e.g. another CPU wrote the line);
 *     retry from 1.
 * On success the "mb" orders the critical section after the acquire.
 */
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	unsigned long t0;

	/*
	 * Note, if we detect that the lock is held when
	 * we do the initial load-locked, we spin using
	 * a non-locked load to save the coherency logic
	 * some work.
	 */

	__asm __volatile(
		"# BEGIN __cpu_simple_lock\n"
		"1:	ldl_l	%0, %3		\n"
		"	bne	%0, 2f		\n"
		"	bis	$31, %2, %0	\n"
		"	stl_c	%0, %1		\n"
		"	beq	%0, 3f		\n"
		"	mb			\n"
		"	br	4f		\n"
		"2:	ldl	%0, %3		\n"
		"	beq	%0, 1b		\n"
		"	br	2b		\n"
		"3:	br	1b		\n"
		"4:				\n"
		"	# END __cpu_simple_lock\n"
		: "=r" (t0), "=m" (*alp)
		: "i" (__SIMPLELOCK_LOCKED), "1" (*alp));
}

/*
 * Try to acquire a lock without spinning.
 *
 * Returns nonzero (v0 set to 1 via "bis $31, 1, %1") if the lock was
 * acquired, zero (label 2 path) if it was already held.  As in
 * __cpu_simple_lock, a failed stl_c (label 3) retries the ldl_l/stl_c
 * sequence; only a genuinely held lock reports failure.
 */
static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	unsigned long t0, v0;

	__asm __volatile(
		"# BEGIN __cpu_simple_lock_try\n"
		"1:	ldl_l	%0, %4		\n"
		"	bne	%0, 2f		\n"
		"	bis	$31, %3, %0	\n"
		"	stl_c	%0, %2		\n"
		"	beq	%0, 3f		\n"
		"	mb			\n"
		"	bis	$31, 1, %1	\n"
		"	br	4f		\n"
		"2:	bis	$31, $31, %1	\n"
		"	br	4f		\n"
		"3:	br	1b		\n"
		"4:				\n"
		"	# END __cpu_simple_lock_try"
		: "=r" (t0), "=r" (v0), "=m" (*alp)
		: "i" (__SIMPLELOCK_LOCKED), "2" (*alp));

	return (v0);
}

/*
 * Release a lock: store zero ($31) to the lock word.
 *
 * NOTE(review): the "mb" here follows the releasing store; the store
 * that ends the critical section is ordered by the acquire-side
 * barriers — presumably intentional for this port, matching rev 1.6
 * as committed.
 */
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{

	__asm __volatile(
		"# BEGIN __cpu_simple_unlock\n"
		"	stl	$31, %0		\n"
		"	mb			\n"
		"	# END __cpu_simple_unlock"
		: "=m" (*alp));
}

#endif /* _ALPHA_LOCK_H_ */