/*	$NetBSD: pthread_md.h,v 1.15 2008/06/23 10:39:38 ad Exp $	*/

/*-
 * Copyright (c) 2001, 2007, 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Nathan J. Williams, and by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _LIB_PTHREAD_I386_MD_H
#define _LIB_PTHREAD_I386_MD_H

#include <sys/ucontext.h>
#include <ucontext.h>

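/*
 * User-level context save/restore entry points.  The _md_*context_u
 * pointers are presumably filled in by pthread__i386_init() (see
 * PTHREAD_MD_INIT below) with either the plain x87 (_s87) or the
 * FXSAVE/SSE (_xmm) routines declared further down, depending on what
 * the CPU supports.
 */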
extern int (*_md_getcontext_u)(ucontext_t *);
extern int (*_md_setcontext_u)(const ucontext_t *);
extern int (*_md_swapcontext_u)(ucontext_t *, const ucontext_t *);

#define _getcontext_u(uc)	(*_md_getcontext_u)((uc))
#define _setcontext_u(uc)	(*_md_setcontext_u)((uc))
#define _swapcontext_u(ouc, nuc) (*_md_swapcontext_u)((ouc), (nuc))

int _getcontext_u_s87(ucontext_t *);
int _setcontext_u_s87(const ucontext_t *);
int _swapcontext_u_s87(ucontext_t *, const ucontext_t *);
int _getcontext_u_xmm(ucontext_t *);
int _setcontext_u_xmm(const ucontext_t *);
int _swapcontext_u_xmm(ucontext_t *, const ucontext_t *);

void pthread__i386_init(void);

#define PTHREAD_MD_INIT pthread__i386_init();

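/* Return the caller's current stack pointer (%esp). */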
static inline long
pthread__sp(void)
{
	long ret;
	__asm("movl %%esp, %0" : "=g" (ret));

	return ret;
}

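/* Fetch the stack pointer / program counter stored in a ucontext. */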
#define pthread__uc_sp(ucp) ((ucp)->uc_mcontext.__gregs[_REG_UESP])
#define pthread__uc_pc(ucp) ((ucp)->uc_mcontext.__gregs[_REG_EIP])

/*
 * Set initial, sane values for registers whose values aren't just
 * "don't care".
 *
 * We use the current context instead of a guessed one because we cannot
 * assume how the GDT entries are ordered: what is true on i386 is not
 * true anymore on amd64.
 */
#define _INITCONTEXT_U_MD(ucp) \
	do { \
		ucontext_t ucur; \
		(void)getcontext(&ucur); \
		(ucp)->uc_mcontext.__gregs[_REG_GS] = \
		    ucur.uc_mcontext.__gregs[_REG_GS], \
		(ucp)->uc_mcontext.__gregs[_REG_FS] = \
		    ucur.uc_mcontext.__gregs[_REG_FS], \
		(ucp)->uc_mcontext.__gregs[_REG_ES] = \
		    ucur.uc_mcontext.__gregs[_REG_ES], \
		(ucp)->uc_mcontext.__gregs[_REG_DS] = \
		    ucur.uc_mcontext.__gregs[_REG_DS], \
		(ucp)->uc_mcontext.__gregs[_REG_CS] = \
		    ucur.uc_mcontext.__gregs[_REG_CS], \
		(ucp)->uc_mcontext.__gregs[_REG_SS] = \
		    ucur.uc_mcontext.__gregs[_REG_SS], \
		(ucp)->uc_mcontext.__gregs[_REG_EFL] = \
		    ucur.uc_mcontext.__gregs[_REG_EFL]; \
	} while (/*CONSTCOND*/0);

/*
 * Usable stack space below the ucontext_t.
 * See comment in pthread_switch.S about STACK_SWITCH.
 */
#define STACKSPACE	32	/* room for 8 integer values */

/*
 * Conversions between struct reg and struct mcontext. Used by
 * libpthread_dbg.
 */

#define PTHREAD_UCONTEXT_TO_REG(reg, uc) do { \
	(reg)->r_gs = (uc)->uc_mcontext.__gregs[_REG_GS]; \
	(reg)->r_fs = (uc)->uc_mcontext.__gregs[_REG_FS]; \
	(reg)->r_es = (uc)->uc_mcontext.__gregs[_REG_ES]; \
	(reg)->r_ds = (uc)->uc_mcontext.__gregs[_REG_DS]; \
	(reg)->r_edi = (uc)->uc_mcontext.__gregs[_REG_EDI]; \
	(reg)->r_esi = (uc)->uc_mcontext.__gregs[_REG_ESI]; \
	(reg)->r_ebp = (uc)->uc_mcontext.__gregs[_REG_EBP]; \
	(reg)->r_ebx = (uc)->uc_mcontext.__gregs[_REG_EBX]; \
	(reg)->r_edx = (uc)->uc_mcontext.__gregs[_REG_EDX]; \
	(reg)->r_ecx = (uc)->uc_mcontext.__gregs[_REG_ECX]; \
	(reg)->r_eax = (uc)->uc_mcontext.__gregs[_REG_EAX]; \
	(reg)->r_eip = (uc)->uc_mcontext.__gregs[_REG_EIP]; \
	(reg)->r_cs = (uc)->uc_mcontext.__gregs[_REG_CS]; \
	(reg)->r_eflags = (uc)->uc_mcontext.__gregs[_REG_EFL]; \
	(reg)->r_esp = (uc)->uc_mcontext.__gregs[_REG_UESP]; \
	(reg)->r_ss = (uc)->uc_mcontext.__gregs[_REG_SS]; \
} while (/*CONSTCOND*/0)

#define PTHREAD_REG_TO_UCONTEXT(uc, reg) do { \
	(uc)->uc_mcontext.__gregs[_REG_GS] = (reg)->r_gs; \
	(uc)->uc_mcontext.__gregs[_REG_FS] = (reg)->r_fs; \
	(uc)->uc_mcontext.__gregs[_REG_ES] = (reg)->r_es; \
	(uc)->uc_mcontext.__gregs[_REG_DS] = (reg)->r_ds; \
	(uc)->uc_mcontext.__gregs[_REG_EDI] = (reg)->r_edi; \
	(uc)->uc_mcontext.__gregs[_REG_ESI] = (reg)->r_esi; \
	(uc)->uc_mcontext.__gregs[_REG_EBP] = (reg)->r_ebp; \
	(uc)->uc_mcontext.__gregs[_REG_EBX] = (reg)->r_ebx; \
	(uc)->uc_mcontext.__gregs[_REG_EDX] = (reg)->r_edx; \
	(uc)->uc_mcontext.__gregs[_REG_ECX] = (reg)->r_ecx; \
	(uc)->uc_mcontext.__gregs[_REG_EAX] = (reg)->r_eax; \
	(uc)->uc_mcontext.__gregs[_REG_EIP] = (reg)->r_eip; \
	(uc)->uc_mcontext.__gregs[_REG_CS] = (reg)->r_cs; \
	(uc)->uc_mcontext.__gregs[_REG_EFL] = (reg)->r_eflags; \
	(uc)->uc_mcontext.__gregs[_REG_UESP] = (reg)->r_esp; \
	(uc)->uc_mcontext.__gregs[_REG_SS] = (reg)->r_ss; \
	/*LINTED precision loss */ \
	(uc)->uc_flags = ((uc)->uc_flags | _UC_CPU) & ~_UC_USER; \
} while (/*CONSTCOND*/0)

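/*
 * Likewise for the FPU state: the plain x87 (struct fpreg) layout first,
 * followed by the FXSAVE/XMM (struct xmmregs) variants.
 */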
#define PTHREAD_UCONTEXT_TO_FPREG(freg, uc) \
	(void)memcpy((freg)->__data, \
	    (uc)->uc_mcontext.__fpregs.__fp_reg_set.__fpchip_state.__fp_state, \
	    sizeof(struct fpreg))

#define PTHREAD_FPREG_TO_UCONTEXT(uc, freg) do { \
	(void)memcpy( \
	    (uc)->uc_mcontext.__fpregs.__fp_reg_set.__fpchip_state.__fp_state, \
	    (freg)->__data, sizeof(struct fpreg)); \
	/*LINTED precision loss */ \
	(uc)->uc_flags = ((uc)->uc_flags | _UC_FPU) & ~_UC_USER; \
} while (/*CONSTCOND*/0)

#define PTHREAD_UCONTEXT_XREG_FLAG _UC_FXSAVE

#define PTHREAD_UCONTEXT_TO_XREG(xreg, uc) \
	(void)memcpy((xreg), \
	    (uc)->uc_mcontext.__fpregs.__fp_reg_set.__fp_xmm_state.__fp_xmm, \
	    sizeof(struct xmmregs))

#define PTHREAD_XREG_TO_UCONTEXT(uc, xreg) do { \
	(void)memcpy( \
	    (uc)->uc_mcontext.__fpregs.__fp_reg_set.__fp_xmm_state.__fp_xmm, \
	    (xreg), \
	    sizeof(struct xmmregs)); \
	(uc)->uc_flags = ((uc)->uc_flags | _UC_FXSAVE) & ~_UC_USER; \
} while (/*CONSTCOND*/0)

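/*
 * "rep; nop" encodes the PAUSE instruction: a spin-wait hint that
 * relaxes the pipeline and, on SMT CPUs, yields resources to the
 * sibling hardware thread.
 */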
#define pthread__smt_pause()	__asm __volatile("rep; nop" ::: "memory")
/* #define PTHREAD__HAVE_THREADREG */

/*
 * No additional memory barriers are needed: atomic (lock-prefixed)
 * operations already act as full memory barriers on x86.
 */
#define PTHREAD__ATOMIC_IS_MEMBAR

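/*
 * Thread "register": the running thread's pthread_t is kept at offset 0
 * of its %gs segment, so pthread__threadreg_get() can fetch it with a
 * single load.
 */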
void pthread__threadreg_set(pthread_t);

static inline pthread_t
#ifdef __GNUC__
__attribute__ ((__const__))
#endif
pthread__threadreg_get(void)
{
	pthread_t self;

	__asm volatile("movl %%gs:0, %0"
	    : "=r" (self)
	    :);

	return self;
}

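/*
 * Interlocked compare-and-swap: if *ptr equals old, store new; either
 * way the previous value is returned (in %eax).  The lock prefix makes
 * the exchange atomic across CPUs and acts as a full memory barrier
 * (cf. PTHREAD__ATOMIC_IS_MEMBAR above).
 */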
static inline void *
_atomic_cas_ptr(volatile void *ptr, void *old, void *new)
{
	volatile uintptr_t *cast = ptr;
	void *ret;

	__asm __volatile ("lock; cmpxchgl %2, %1"
	    : "=a" (ret), "=m" (*cast)
	    : "r" (new), "m" (*cast), "0" (old));

	return ret;
}

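/*
 * Non-interlocked ("_ni") variant: the same cmpxchg but without the
 * lock prefix, so it is presumably only atomic with respect to the
 * local CPU (e.g. against preemption), not against other processors.
 */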
static inline void *
_atomic_cas_ptr_ni(volatile void *ptr, void *old, void *new)
{
	volatile uintptr_t *cast = ptr;
	void *ret;

	__asm __volatile ("cmpxchgl %2, %1"
	    : "=a" (ret), "=m" (*cast)
	    : "r" (new), "m" (*cast), "0" (old));

	return ret;
}

#endif	/* _LIB_PTHREAD_I386_MD_H */