/* $NetBSD: lock.h,v 1.1.2.1 2015/04/06 15:18:01 skrll Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */
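
/*
 * Ordering note (a summary of the operations below, not new behaviour):
 * taking the lock is an atomic exchange with acquire semantics and
 * releasing it is a store with release semantics, so loads and stores
 * made while the lock is held cannot leak out of the critical section.
 */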

#ifndef _RISCV_LOCK_H_
#define	_RISCV_LOCK_H_

static __inline int
__SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr != __SIMPLELOCK_UNLOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}

/*
 * In the three functions below both preprocessor branches store the
 * same value; the disabled #else branch is the equivalent relaxed
 * atomic store.
 */
static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
#if 1
	*__ptr = __SIMPLELOCK_UNLOCKED;
#else
	__atomic_store_n(__ptr, __SIMPLELOCK_UNLOCKED, __ATOMIC_RELAXED);
#endif
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
#if 1
	*__ptr = __SIMPLELOCK_LOCKED;
#else
	__atomic_store_n(__ptr, __SIMPLELOCK_LOCKED, __ATOMIC_RELAXED);
#endif
}

static __inline void __unused
__cpu_simple_lock_init(__cpu_simple_lock_t *__ptr)
{
#if 1
	*__ptr = __SIMPLELOCK_UNLOCKED;
#else
	__atomic_store_n(__ptr, __SIMPLELOCK_UNLOCKED, __ATOMIC_RELAXED);
#endif
}

static __inline void __unused
__cpu_simple_lock(__cpu_simple_lock_t *__ptr)
{
	while (__atomic_exchange_n(__ptr, __SIMPLELOCK_LOCKED,
	    __ATOMIC_ACQUIRE) == __SIMPLELOCK_LOCKED) {
		/* do nothing */
	}
}

static __inline int __unused
__cpu_simple_lock_try(__cpu_simple_lock_t *__ptr)
{
	/*
	 * The exchange returns the previous value, so the lock was
	 * acquired (and nonzero is returned) only if it was previously
	 * unlocked.
	 */
	return __atomic_exchange_n(__ptr, __SIMPLELOCK_LOCKED,
	    __ATOMIC_ACQUIRE) == __SIMPLELOCK_UNLOCKED;
}
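
/*
 * Illustrative try-lock pattern (a sketch, not part of this header;
 * `lk' is a hypothetical caller-owned lock): the critical section may
 * run only when the try actually acquired the lock.
 *
 *	if (__cpu_simple_lock_try(&lk)) {
 *		... critical section ...
 *		__cpu_simple_unlock(&lk);
 *	}
 */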

static __inline void __unused
__cpu_simple_unlock(__cpu_simple_lock_t *__ptr)
{
	__atomic_store_n(__ptr, __SIMPLELOCK_UNLOCKED, __ATOMIC_RELEASE);
}
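
/*
 * Usage sketch (illustrative only; `example_lock' is a hypothetical
 * caller-owned lock, initialized once before first use):
 *
 *	static __cpu_simple_lock_t example_lock;
 *
 *	__cpu_simple_lock_init(&example_lock);
 *	...
 *	__cpu_simple_lock(&example_lock);
 *	... critical section ...
 *	__cpu_simple_unlock(&example_lock);
 */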

#endif /* _RISCV_LOCK_H_ */