This source file includes the following definitions (one implementation per platform branch):
- memory_barrier
- atomic_compare_and_swap
- atomic_compare_and_swap_ptr
#include <config.h>

/* Specification.  */
#include "simple-atomic.h"

#if 0x590 <= __SUNPRO_C && __STDC__
/* Sun C 5.9 or newer in standard C mode spells 'asm' as '__asm'.  */
# define asm __asm
#endif

#if defined _WIN32 && ! defined __CYGWIN__
/* Native Windows.  */

# include <windows.h>

void
memory_barrier (void)
{
  /* MemoryBarrier is a full memory barrier provided by <windows.h>.  */
  MemoryBarrier ();
}

unsigned int
atomic_compare_and_swap (unsigned int volatile *vp,
                         unsigned int cmp,
                         unsigned int newval)
{
  /* InterlockedCompareExchange operates on 32-bit LONG values.  */
  return InterlockedCompareExchange ((LONG volatile *) vp,
                                     (LONG) newval, (LONG) cmp);
}

uintptr_t
atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
                             uintptr_t cmp,
                             uintptr_t newval)
{
  /* InterlockedCompareExchangePointer operates on pointer-sized values.  */
  return InterlockedCompareExchangePointer ((void * volatile *) vp,
                                            (void *) newval, (void *) cmp);
}

#elif HAVE_PTHREAD_H
/* Some other platform that supports multithreading via POSIX threads.  */

/* Prefer the compiler's __sync built-ins when they are known to work:
   GCC >= 4.1 (except on SPARC) or clang >= 3, and not the IBM XL
   compiler (__ibmxl__).  */
# if (((__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)) \
       && !defined __sparc__) \
      || __clang_major__ >= 3) \
     && !defined __ibmxl__

/* Use the compiler's __sync built-ins, which expand to the appropriate
   atomic instructions and barriers for the target CPU.  */

void
memory_barrier (void)
{
  __sync_synchronize ();
}

unsigned int
atomic_compare_and_swap (unsigned int volatile *vp,
                         unsigned int cmp,
                         unsigned int newval)
{
  return __sync_val_compare_and_swap (vp, cmp, newval);
}

uintptr_t
atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
                             uintptr_t cmp,
                             uintptr_t newval)
{
  return __sync_val_compare_and_swap (vp, cmp, newval);
}

# elif defined _AIX
/* AIX, where the __sync built-ins are not usable (e.g. with the IBM XL
   compilers): use PowerPC inline assembly.  */

void
memory_barrier (void)
{
  asm volatile ("sync");
}

unsigned int
atomic_compare_and_swap (unsigned int volatile *vp,
                         unsigned int cmp,
                         unsigned int newval)
{
  /* Act as a full barrier before the operation.  */
  asm volatile ("sync");

  /* lwarx/stwcx. loop: retry until the reservation on *vp still holds when
     the new value is stored.  GCC and clang accept numeric local labels;
     other compilers get named labels.  */
  unsigned int oldval;
  asm volatile (
#  if defined __GNUC__ || defined __clang__
                "1: lwarx %0,0,%1\n"
                " cmpw 0,%0,%2\n"
                " bne 0,2f\n"
                " stwcx. %3,0,%1\n"
                " bne 0,1b\n"
                "2:"
#  else
                ".L01: lwarx %0,0,%1\n"
                " cmpw 0,%0,%2\n"
                " bne 0,.L02\n"
                " stwcx. %3,0,%1\n"
                " bne 0,.L01\n"
                ".L02:"
#  endif
                : "=&r" (oldval)
                : "r" (vp), "r" (cmp), "r" (newval)
                : "cr0");

  /* Act as a barrier after the operation.  */
  asm volatile ("isync");
  return oldval;
}

uintptr_t
atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
                             uintptr_t cmp,
                             uintptr_t newval)
{
  /* Act as a full barrier before the operation.  */
  asm volatile ("sync");

  /* Same loop as above; 64-bit ABIs use ldarx/stdcx. and cmpd, 32-bit ABIs
     the word-sized variants.  */
  uintptr_t oldval;
  asm volatile (
#  if defined __GNUC__ || defined __clang__
#   if defined __powerpc64__ || defined __LP64__
                "1: ldarx %0,0,%1\n"
                " cmpd 0,%0,%2\n"
                " bne 0,2f\n"
                " stdcx. %3,0,%1\n"
                " bne 0,1b\n"
                "2:"
#   else
                "1: lwarx %0,0,%1\n"
                " cmpw 0,%0,%2\n"
                " bne 0,2f\n"
                " stwcx. %3,0,%1\n"
                " bne 0,1b\n"
                "2:"
#   endif
#  else
#   if defined __powerpc64__ || defined __LP64__
                ".L01: ldarx %0,0,%1\n"
                " cmpd 0,%0,%2\n"
                " bne 0,.L02\n"
                " stdcx. %3,0,%1\n"
                " bne 0,.L01\n"
                ".L02:"
#   else
                ".L01: lwarx %0,0,%1\n"
                " cmpw 0,%0,%2\n"
                " bne 0,.L02\n"
                " stwcx. %3,0,%1\n"
                " bne 0,.L01\n"
                ".L02:"
#   endif
#  endif
                : "=&r" (oldval)
                : "r" (vp), "r" (cmp), "r" (newval)
                : "cr0");

  /* Act as a barrier after the operation.  */
  asm volatile ("isync");
  return oldval;
}

# elif ((defined __GNUC__ || defined __clang__ || defined __SUNPRO_C) && (defined __sparc || defined __i386 || defined __x86_64__)) || (defined __TINYC__ && (defined __i386 || defined __x86_64__))
/* SPARC or x86 with GCC, clang, Sun/Oracle Studio C, or TinyCC: use inline
   assembly.  GCC, clang, Sun C >= 5.9 and TinyCC understand GCC's extended
   asm syntax with operands; older Sun cc only understands simple asm, hence
   the two variants below.  */

void
memory_barrier (void)
{
#  if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
  /* Extended asm.  */
#   if defined __i386 || defined __x86_64__
#    if defined __TINYC__ && defined __i386
  /* TinyCC's i386 assembler does not handle the SSE instruction 'mfence';
     a locked read-modify-write on the stack acts as a full barrier too.  */
  asm volatile ("lock orl $0,(%esp)");
#    else
  asm volatile ("mfence");
#    endif
#   endif
#   if defined __sparc
  asm volatile ("membar 2");  /* membar #StoreLoad */
#   endif
#  else
  /* Simple asm.  */
#   if defined __i386 || defined __x86_64__
  asm ("mfence");
#   endif
#   if defined __sparc
  asm ("membar 2");  /* membar #StoreLoad */
#   endif
#  endif
}

unsigned int
atomic_compare_and_swap (unsigned int volatile *vp,
                         unsigned int cmp,
                         unsigned int newval)
{
#  if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
  /* Extended asm.  */
  unsigned int oldval;
#   if defined __i386 || defined __x86_64__
  asm volatile (" lock\n cmpxchgl %3,(%1)"
                : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
#   endif
#   if defined __sparc
  asm volatile (" cas [%1],%2,%3\n"
                " mov %3,%0"
                : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
#   endif
  return oldval;
#  else
  /* Simple asm: the arguments are accessed through the ABI's argument
     registers or stack slots, and the old value is left in the return
     register; there is no C-level 'return' statement.  */
#   if defined __x86_64__
  asm (" movl %esi,%eax\n"
       " lock\n cmpxchgl %edx,(%rdi)");
#   elif defined __i386
  asm (" movl 16(%ebp),%ecx\n"
       " movl 12(%ebp),%eax\n"
       " movl 8(%ebp),%edx\n"
       " lock\n cmpxchgl %ecx,(%edx)");
#   endif
#   if defined __sparc
  asm (" cas [%i0],%i1,%i2\n"
       " mov %i2,%i0");
#   endif
#  endif
}

uintptr_t
atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
                             uintptr_t cmp,
                             uintptr_t newval)
{
#  if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
  /* Extended asm.  */
  uintptr_t oldval;
#   if defined __x86_64__
  asm volatile (" lock\n cmpxchgq %3,(%1)"
                : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
#   elif defined __i386
  asm volatile (" lock\n cmpxchgl %3,(%1)"
                : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
#   endif
#   if defined __sparc && (defined __sparcv9 || defined __arch64__)
  asm volatile (" casx [%1],%2,%3\n"
                " mov %3,%0"
                : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
#   elif defined __sparc
  asm volatile (" cas [%1],%2,%3\n"
                " mov %3,%0"
                : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
#   endif
  return oldval;
#  else
  /* Simple asm, as above: the old value is left in the return register.  */
#   if defined __x86_64__
  asm (" movq %rsi,%rax\n"
       " lock\n cmpxchgq %rdx,(%rdi)");
#   elif defined __i386
  asm (" movl 16(%ebp),%ecx\n"
       " movl 12(%ebp),%eax\n"
       " movl 8(%ebp),%edx\n"
       " lock\n cmpxchgl %ecx,(%edx)");
#   endif
#   if defined __sparc && (defined __sparcv9 || defined __arch64__)
  asm (" casx [%i0],%i1,%i2\n"
       " mov %i2,%i0");
#   elif defined __sparc
  asm (" cas [%i0],%i1,%i2\n"
       " mov %i2,%i0");
#   endif
#  endif
}

# else
/* Fallback code.  These implementations are not actually atomic and
   therefore have race conditions; they are only a last resort.  */

void
memory_barrier (void)
{
}

unsigned int
atomic_compare_and_swap (unsigned int volatile *vp,
                         unsigned int cmp,
                         unsigned int newval)
{
  unsigned int oldval = *vp;
  if (oldval == cmp)
    *vp = newval;
  return oldval;
}

uintptr_t
atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
                             uintptr_t cmp,
                             uintptr_t newval)
{
  uintptr_t oldval = *vp;
  if (oldval == cmp)
    *vp = newval;
  return oldval;
}

# endif

#else
/* Neither native Windows nor <pthread.h>: assume the program runs
   single-threaded, so trivial implementations suffice.  */

void
memory_barrier (void)
{
}

unsigned int
atomic_compare_and_swap (unsigned int volatile *vp,
                         unsigned int cmp,
                         unsigned int newval)
{
  unsigned int oldval = *vp;
  if (oldval == cmp)
    *vp = newval;
  return oldval;
}

uintptr_t
atomic_compare_and_swap_ptr (uintptr_t volatile *vp,
                             uintptr_t cmp,
                             uintptr_t newval)
{
  uintptr_t oldval = *vp;
  if (oldval == cmp)
    *vp = newval;
  return oldval;
}

#endif
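
/* Illustrative sketch (kept disabled with '#if 0'): a minimal test-and-set
   spin lock built on the primitives defined above, assuming the
   declarations from "simple-atomic.h".  The names spin_word, spin_lock and
   spin_unlock are hypothetical and only show the intended call pattern.  */
#if 0
static unsigned int volatile spin_word; /* 0 = unlocked, 1 = locked */

static void
spin_lock (void)
{
  /* Retry until the word was 0 and has atomically been replaced by 1.  */
  while (atomic_compare_and_swap (&spin_word, 0, 1) != 0)
    ;
  /* Keep the protected accesses ordered after the lock acquisition.  */
  memory_barrier ();
}

static void
spin_unlock (void)
{
  /* Make the protected writes visible before releasing the lock.  */
  memory_barrier ();
  spin_word = 0;
}
#endif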