11528 Makefile.noget can get gone
11529 Use -Wno-maybe-uninitialized
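
The substantive change to thr_inlines.h in this webrev initializes __value in set_lock_byte(). On x86 that variable is passed as a read-write ("+d") asm operand, so GCC treats its initial value as an input to the asm; the compiler does not parse the template, so it cannot see that "movl $1, %0" writes the operand before anything reads it, and newer GCC's maybe-uninitialized analysis can flag the old uninitialized declaration. A minimal sketch of the pattern, with a hypothetical function name that is not part of the webrev:

static inline int
asm_rw_operand(void)		/* x86-only sketch, like the __x86 arm of set_lock_byte() */
{
	int __value = 0;	/* was "int __value;" -- the declaration the warning flags */

	__asm__ __volatile__(
	    "movl $1, %0"	/* writes %0 first, but GCC cannot see inside the template */
	    : "+r" (__value));	/* "+" makes the initial value an input to the asm */
	return (__value);
}
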
--- old/usr/src/lib/libc/inc/thr_inlines.h
+++ new/usr/src/lib/libc/inc/thr_inlines.h
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21
22 22 /*
23 23 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
24 24 * Use is subject to license terms.
25 25 */
26 26
27 +/*
28 + * Copyright 2019 Joyent, Inc.
29 + */
30 +
27 31 #ifndef _THR_INLINES_H
28 32 #define _THR_INLINES_H
29 33
30 34 #include <sys/ccompile.h>
31 35
32 36 #if !defined(__lint) && defined(__GNUC__)
33 37
34 38 /* inlines for gcc */
35 39
36 40 /*
37 41 * ON-usable GCC 4.x emits register pseudo-ops declaring %g7 as ignored, rather
38 42 * than scratch, GCC 3 does the reverse. All uses, both ones it generated
39 43 * (_curthread) and ones it didn't (__curthread) must agree.
40 44 */
41 45 #if __GNUC__ > 3
42 46 #define SPARC_REG_SPEC "#ignore"
43 47 #else
44 48 #define SPARC_REG_SPEC "#scratch"
45 49 #endif
46 50
47 51 extern __GNU_INLINE ulwp_t *
48 52 _curthread(void)
49 53 {
50 54 #if defined(__amd64)
51 55 ulwp_t *__value;
52 56 __asm__ __volatile__("movq %%fs:0, %0" : "=r" (__value));
53 57 #elif defined(__i386)
54 58 ulwp_t *__value;
55 59 __asm__ __volatile__("movl %%gs:0, %0" : "=r" (__value));
56 60 #elif defined(__sparc)
57 61 register ulwp_t *__value __asm__("g7");
58 62 #else
59 63 #error "port me"
60 64 #endif
61 65 return (__value);
62 66 }
63 67
64 68 extern __GNU_INLINE ulwp_t *
65 69 __curthread(void)
66 70 {
67 71 ulwp_t *__value;
68 72 __asm__ __volatile__(
69 73 #if defined(__amd64)
70 74 "movq %%fs:0, %0\n\t"
71 75 #elif defined(__i386)
72 76 "movl %%gs:0, %0\n\t"
73 77 #elif defined(__sparcv9)
74 78 ".register %%g7, " SPARC_REG_SPEC "\n\t"
75 79 "ldx [%%g7 + 80], %0\n\t"
76 80 #elif defined(__sparc)
77 81 ".register %%g7, " SPARC_REG_SPEC "\n\t"
78 82 "ld [%%g7 + 80], %0\n\t"
79 83 #else
80 84 #error "port me"
81 85 #endif
82 86 : "=r" (__value));
83 87 return (__value);
84 88 }
85 89
86 90 extern __GNU_INLINE greg_t
87 91 stkptr(void)
88 92 {
89 93 #if defined(__amd64)
90 94 register greg_t __value __asm__("rsp");
91 95 #elif defined(__i386)
92 96 register greg_t __value __asm__("esp");
93 97 #elif defined(__sparc)
94 98 register greg_t __value __asm__("sp");
95 99 #else
96 100 #error "port me"
97 101 #endif
98 102 return (__value);
99 103 }
100 104
101 105 extern __GNU_INLINE hrtime_t
102 106 gethrtime(void) /* note: caller-saved registers are trashed */
103 107 {
104 108 #if defined(__amd64)
105 109 hrtime_t __value;
106 110 __asm__ __volatile__(
107 111 "movl $3, %%eax\n\t"
108 112 "int $0xd2"
109 113 : "=a" (__value)
110 114 : : "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11", "cc");
111 115 #elif defined(__i386)
112 116 hrtime_t __value;
113 117 __asm__ __volatile__(
114 118 "movl $3, %%eax\n\t"
115 119 "int $0xd2"
116 120 : "=A" (__value)
117 121 : : "ecx", "cc");
118 122 #elif defined(__sparcv9)
119 123 register hrtime_t __value __asm__("o0");
120 124 __asm__ __volatile__(
121 125 "ta 0x24\n\t"
122 126 "sllx %%o0, 32, %0\n\t"
123 127 "or %%o1, %0, %0"
124 128 : "=r" (__value)
125 129 : : "o1", "o2", "o3", "o4", "o5", "cc");
126 130 #elif defined(__sparc)
127 131 register hrtime_t __value __asm__("o0");
128 132 __asm__ __volatile__(
129 133 "ta 0x24"
130 134 : "=r" (__value)
131 135 : : "o2", "o3", "o4", "o5", "cc");
132 136 #else
133 137 #error "port me"
134 138 #endif
135 139 return (__value);
136 140 }
137 141
138 142 extern __GNU_INLINE int
139 143 set_lock_byte(volatile uint8_t *__lockp)
140 144 {
141 - int __value;
145 + int __value = 0;
142 146 #if defined(__x86)
143 147 __asm__ __volatile__(
144 148 "movl $1, %0\n\t"
145 149 "xchgb %%dl, %1"
146 150 : "+d" (__value), "+m" (*__lockp));
147 151 #elif defined(__sparc)
148 152 __asm__ __volatile__(
149 153 "ldstub %1, %0\n\t"
150 154 "membar #LoadLoad"
151 155 : "=r" (__value), "+m" (*__lockp));
152 156 #else
153 157 #error "port me"
154 158 #endif
155 159 return (__value);
156 160 }
157 161
158 162 extern __GNU_INLINE uint32_t
159 163 atomic_swap_32(volatile uint32_t *__memory, uint32_t __value)
160 164 {
161 165 #if defined(__x86)
162 166 __asm__ __volatile__(
163 167 "xchgl %0, %1"
164 168 : "+q" (__value), "+m" (*__memory));
165 169 return (__value);
166 170 #elif defined(__sparc)
167 171 uint32_t __tmp1, __tmp2;
168 172 __asm__ __volatile__(
169 173 "ld [%3], %0\n\t"
170 174 "1:\n\t"
171 175 "mov %4, %1\n\t"
172 176 "cas [%3], %0, %1\n\t"
173 177 "cmp %0, %1\n\t"
174 178 "bne,a,pn %%icc, 1b\n\t"
175 179 " mov %1, %0"
176 180 : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
177 181 : "r" (__memory), "r" (__value)
178 182 : "cc");
179 183 return (__tmp2);
180 184 #else
181 185 #error "port me"
182 186 #endif
183 187 }
184 188
185 189 extern __GNU_INLINE uint32_t
186 190 atomic_cas_32(volatile uint32_t *__memory, uint32_t __cmp, uint32_t __newvalue)
187 191 {
188 192 uint32_t __oldvalue;
189 193 #if defined(__x86)
190 194 __asm__ __volatile__(
191 195 "lock; cmpxchgl %3, %0"
192 196 : "=m" (*__memory), "=a" (__oldvalue)
193 197 : "a" (__cmp), "r" (__newvalue));
194 198 #elif defined(__sparc)
195 199 __asm__ __volatile__(
196 200 "cas [%2], %3, %1"
197 201 : "=m" (*__memory), "=&r" (__oldvalue)
198 202 : "r" (__memory), "r" (__cmp), "1" (__newvalue));
199 203 #else
200 204 #error "port me"
201 205 #endif
202 206 return (__oldvalue);
203 207 }
204 208
205 209 extern __GNU_INLINE void
206 210 atomic_inc_32(volatile uint32_t *__memory)
207 211 {
208 212 #if defined(__x86)
209 213 __asm__ __volatile__(
210 214 "lock; incl %0"
211 215 : "+m" (*__memory));
212 216 #elif defined(__sparc)
213 217 uint32_t __tmp1, __tmp2;
214 218 __asm__ __volatile__(
215 219 "ld [%3], %0\n\t"
216 220 "1:\n\t"
217 221 "add %0, 1, %1\n\t"
218 222 "cas [%3], %0, %1\n\t"
219 223 "cmp %0, %1\n\t"
220 224 "bne,a,pn %%icc, 1b\n\t"
221 225 " mov %1, %0"
222 226 : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
223 227 : "r" (__memory)
224 228 : "cc");
225 229 #else
226 230 #error "port me"
227 231 #endif
228 232 }
229 233
230 234 extern __GNU_INLINE void
231 235 atomic_dec_32(volatile uint32_t *__memory)
232 236 {
233 237 #if defined(__x86)
234 238 __asm__ __volatile__(
235 239 "lock; decl %0"
236 240 : "+m" (*__memory));
237 241 #elif defined(__sparc)
238 242 uint32_t __tmp1, __tmp2;
239 243 __asm__ __volatile__(
240 244 "ld [%3], %0\n\t"
241 245 "1:\n\t"
242 246 "sub %0, 1, %1\n\t"
243 247 "cas [%3], %0, %1\n\t"
244 248 "cmp %0, %1\n\t"
245 249 "bne,a,pn %%icc, 1b\n\t"
246 250 " mov %1, %0"
247 251 : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
248 252 : "r" (__memory)
249 253 : "cc");
250 254 #else
251 255 #error "port me"
252 256 #endif
253 257 }
254 258
255 259 extern __GNU_INLINE void
256 260 atomic_and_32(volatile uint32_t *__memory, uint32_t __bits)
257 261 {
258 262 #if defined(__x86)
259 263 __asm__ __volatile__(
260 264 "lock; andl %1, %0"
261 265 : "+m" (*__memory)
262 266 : "r" (__bits));
263 267 #elif defined(__sparc)
264 268 uint32_t __tmp1, __tmp2;
265 269 __asm__ __volatile__(
266 270 "ld [%3], %0\n\t"
267 271 "1:\n\t"
268 272 "and %0, %4, %1\n\t"
269 273 "cas [%3], %0, %1\n\t"
270 274 "cmp %0, %1\n\t"
271 275 "bne,a,pn %%icc, 1b\n\t"
272 276 " mov %1, %0"
273 277 : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
274 278 : "r" (__memory), "r" (__bits)
275 279 : "cc");
276 280 #else
277 281 #error "port me"
278 282 #endif
279 283 }
280 284
281 285 extern __GNU_INLINE void
282 286 atomic_or_32(volatile uint32_t *__memory, uint32_t __bits)
283 287 {
284 288 #if defined(__x86)
285 289 __asm__ __volatile__(
286 290 "lock; orl %1, %0"
287 291 : "+m" (*__memory)
288 292 : "r" (__bits));
289 293 #elif defined(__sparc)
290 294 uint32_t __tmp1, __tmp2;
291 295 __asm__ __volatile__(
292 296 "ld [%3], %0\n\t"
293 297 "1:\n\t"
294 298 "or %0, %4, %1\n\t"
295 299 "cas [%3], %0, %1\n\t"
296 300 "cmp %0, %1\n\t"
297 301 "bne,a,pn %%icc, 1b\n\t"
298 302 " mov %1, %0"
299 303 : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
300 304 : "r" (__memory), "r" (__bits)
301 305 : "cc");
302 306 #else
303 307 #error "port me"
304 308 #endif
305 309 }
306 310
307 311 #if defined(__sparc) /* only needed on sparc */
308 312
309 313 extern __GNU_INLINE ulong_t
310 314 caller(void)
311 315 {
312 316 register ulong_t __value __asm__("i7");
313 317 return (__value);
314 318 }
315 319
316 320 extern __GNU_INLINE ulong_t
317 321 getfp(void)
318 322 {
319 323 register ulong_t __value __asm__("fp");
320 324 return (__value);
321 325 }
322 326
323 327 #endif /* __sparc */
324 328
325 329 #if defined(__x86) /* only needed on x86 */
326 330
327 331 extern __GNU_INLINE void
328 332 ht_pause(void)
329 333 {
330 334 __asm__ __volatile__("rep; nop");
331 335 }
332 336
333 337 #endif /* __x86 */
334 338
335 339 #endif /* !__lint && __GNUC__ */
336 340
337 341 #endif /* _THR_INLINES_H */
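
As a usage note, not part of the webrev: set_lock_byte() returns the previous value of the lock byte, so a nonzero return means another thread already holds the lock, and ht_pause() is the x86 spin-wait hint. A toy acquire loop built on the two, ignoring the waiter-bit and release-ordering handling that libc's real lock paths perform:

static void
toy_spin_lock(volatile uint8_t *__lockp)	/* hypothetical helper, not in libc */
{
	while (set_lock_byte(__lockp) != 0) {	/* nonzero: lock was already held */
#if defined(__x86)
		ht_pause();			/* PAUSE hint while spinning */
#endif
	}
}
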