/*	$NetBSD: atomic_cas.S,v 1.4.4.2 2008/01/09 01:21:08 matt Exp $	*/

/*-
 * Copyright (c) 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Andrew Doran and Jason R. Thorpe.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by the NetBSD
 *	Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
38 1.4.4.2 matt
#include "atomic_op_asm.h"

#if defined(_KERNEL)

#include <machine/psl.h>

#include "opt_multiprocessor.h"

/*
 * Block interrupts by setting the PIL field in the PSR; the original
 * PSR is saved in %o4 so RESTORE_INTERRUPTS can put it back.
 * The three nops cover the write delay after "wr %psr" (SPARC allows
 * up to three delay instructions before the new PSR takes effect).
 * Clobbers: %o4 (saved PSR), %o5.
 */
#define	DISABLE_INTERRUPTS						 \
	rd	%psr, %o4		/* disable interrupts */	;\
	or	%o4, PSR_PIL, %o5					;\
	wr	%o5, 0, %psr						;\
	nop								;\
	nop								;\
	nop

/*
 * Restore the PSR saved in %o4 by DISABLE_INTERRUPTS, re-enabling
 * interrupts; again pad the PSR write delay with three nops.
 */
#define	RESTORE_INTERRUPTS						 \
	wr	%o4, 0, %psr		/* enable interrupts */		;\
	nop								;\
	nop								;\
	nop

#else	/* _KERNEL */

/*
 * Userland cannot write the PSR, so interrupts are never masked here;
 * force MULTIPROCESSOR so mutual exclusion comes solely from the
 * ldstub interlock below.
 */
#define	MULTIPROCESSOR		1
#define	DISABLE_INTERRUPTS	/* nothing */
#define	RESTORE_INTERRUPTS	/* nothing */

#endif	/* _KERNEL */
68 1.4.4.2 matt
#if defined(MULTIPROCESSOR)
	/*
	 * 1024 one-byte spin locks, indexed by a hash of the target
	 * address.  The table is 1024-byte aligned so that the low 10
	 * bits of its address are zero and the hash can simply be
	 * added to %hi(table) below.
	 */
	.section .bss
	.align	1024
OTYPE(_C_LABEL(_atomic_cas_locktab))
_C_LABEL(_atomic_cas_locktab):
	.space	1024

/*
 * Disable interrupts, hash the target address in %o0 into the lock
 * table ((%o0 >> 3) & 1023), and spin with ldstub until the lock byte
 * is acquired (ldstub reads the byte and sets it to 0xff atomically;
 * a zero read means we got it).
 * On exit: %o4 = saved PSR (kernel only), %o5 = interlock address.
 * Clobbers: %o3.
 */
#define	ACQUIRE_INTERLOCK						 \
	DISABLE_INTERRUPTS						;\
	srl	%o0, 3, %o5		/* get lock address */		;\
	and	%o5, 1023, %o5						;\
	sethi	%hi(_C_LABEL(_atomic_cas_locktab)), %o3			;\
	add	%o5, %o3, %o5						;\
									;\
	/* %o5 has interlock address */					;\
									;\
1:	ldstub	[%o5], %o3		/* acquire lock */		;\
	tst	%o3							;\
	bz,a	2f							;\
	 nop								;\
	nop								;\
	nop								;\
	b,a	1b			/* spin */			;\
	 nop								;\
	/* We now hold the interlock */					;\
2:

/*
 * Drop the lock byte acquired by ACQUIRE_INTERLOCK (%o5 still holds
 * its address) and restore the interrupt state saved in %o4.
 */
#define	RELEASE_INTERLOCK						 \
	stb	%g0, [%o5]		/* release interlock */		;\
	RESTORE_INTERRUPTS

#else	/* ! MULTIPROCESSOR */

/* Uniprocessor kernel: blocking interrupts alone is sufficient. */
#define	ACQUIRE_INTERLOCK	DISABLE_INTERRUPTS
#define	RELEASE_INTERLOCK	RESTORE_INTERRUPTS

#endif	/* MULTIPROCESSOR */
107 1.4.4.2 matt
108 1.4.4.2 matt .text
109 1.4.4.2 matt
110 1.4.4.2 matt /*
111 1.4.4.2 matt * The v7 and v8 SPARC doesn't have compare-and-swap, so we block interrupts
112 1.4.4.2 matt * and use an interlock.
113 1.4.4.2 matt *
114 1.4.4.2 matt * XXX On single CPU systems, this should use a restartable sequence:
115 1.4.4.2 matt * XXX there we don't need the overhead of interlocking.
116 1.4.4.2 matt *
117 1.4.4.2 matt * XXX NOTE! The interlock trick only works if EVERYTHING writes to
118 1.4.4.2 matt * XXX the memory cell through this code path!
119 1.4.4.2 matt */
120 1.4.4.2 matt ENTRY_NOPROFILE(_atomic_cas_32)
121 1.4.4.2 matt ACQUIRE_INTERLOCK
122 1.4.4.2 matt ! %o4 has saved PSR value
123 1.4.4.2 matt ! %o5 has interlock address
124 1.4.4.2 matt
125 1.4.4.2 matt ld [%o0], %o3 ! get old value
126 1.4.4.2 matt cmp %o1, %o3 ! old == new?
127 1.4.4.2 matt beq,a 3f ! yes, do the store
128 1.4.4.2 matt st %o2, [%o0] ! (in the delay slot)
129 1.4.4.2 matt
130 1.4.4.2 matt 3: RELEASE_INTERLOCK
131 1.4.4.2 matt
132 1.4.4.2 matt retl
133 1.4.4.2 matt mov %o3, %o0 ! return old value
134 1.4.4.2 matt
/*
 * All 32-bit CAS flavors (uint, ulong, ptr — all 32 bits on this
 * ILP32 port) are aliases for the single implementation above.
 */
ATOMIC_OP_ALIAS(atomic_cas_32,_atomic_cas_32)
ATOMIC_OP_ALIAS(atomic_cas_uint,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32)
ATOMIC_OP_ALIAS(atomic_cas_ulong,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32)
ATOMIC_OP_ALIAS(atomic_cas_ptr,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32)