/* $NetBSD: lock.h,v 1.1 2014/08/10 05:47:38 matt Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
/*
 * Machine-dependent spin lock operations.
 *
 * On AArch64 these are built on the compiler's __atomic builtins:
 * __atomic_test_and_set acquires the lock and __atomic_clear releases
 * it, with the memory-order argument supplying the acquire/release
 * semantics that fence the critical section.
 */

#ifndef _AARCH64_LOCK_H_
#define	_AARCH64_LOCK_H_

#ifdef __aarch64__

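/*
 * These operations assume __cpu_simple_lock_t (defined in this port's
 * <machine/types.h>) is a byte-sized type, since __atomic_test_and_set
 * and __atomic_clear operate on a bool or char object.
 */
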
static __inline int
__SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
{
	/* Plain (unordered) read; suitable for assertions only. */
	return *__ptr != __SIMPLELOCK_UNLOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
{
	/* Plain (unordered) read; suitable for assertions only. */
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
	/* Store the unlocked value; no memory ordering implied. */
	__atomic_clear(__ptr, __ATOMIC_RELAXED);
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
	/* Store the locked value; no memory ordering implied. */
	(void)__atomic_test_and_set(__ptr, __ATOMIC_RELAXED);
}

static __inline void __unused
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{
	/*
	 * Initialize to the unlocked state.  No ordering is needed
	 * here, and __atomic_clear only accepts the relaxed, release,
	 * and seq_cst memory orders (acquire is invalid for a clear).
	 */
	__atomic_clear(alp, __ATOMIC_RELAXED);
}

static __inline void __unused
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	/*
	 * Spin until the test-and-set succeeds.  Acquire ordering
	 * keeps the critical section from being reordered ahead of
	 * the lock.
	 */
	while (__atomic_test_and_set(alp, __ATOMIC_ACQUIRE)) {
		/* spin */
	}
}
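
/*
 * A busy-wait loop like the one above can be made friendlier to SMT
 * siblings and the power budget with the AArch64 YIELD hint; a minimal
 * sketch (illustrative only, not part of this header's API):
 *
 *	while (__atomic_test_and_set(alp, __ATOMIC_ACQUIRE))
 *		__asm __volatile("yield");
 */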

static __inline int __unused
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	/* One attempt only; returns nonzero iff the lock was taken. */
	return !__atomic_test_and_set(alp, __ATOMIC_ACQUIRE);
}

static __inline void __unused
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	/*
	 * Release ordering makes the critical section's stores
	 * visible before the lock is observed as free.
	 */
	__atomic_clear(alp, __ATOMIC_RELEASE);
}
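
/*
 * A minimal usage sketch, assuming a caller-owned lock variable (the
 * name "lock" below is hypothetical):
 *
 *	static __cpu_simple_lock_t lock;
 *
 *	__cpu_simple_lock_init(&lock);
 *	...
 *	__cpu_simple_lock(&lock);
 *	... critical section ...
 *	__cpu_simple_unlock(&lock);
 */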

#elif defined(__arm__)

#include <arm/lock.h>

#endif

#endif /* _AARCH64_LOCK_H_ */