Unmark eglibc being EXPERIMENTAL. For Debian to switch to eglibc,
we can safely assume that it is stable enough! ;-)
See: http://blog.aurel32.net/?p=47
-------- diffstat follows --------
 /trunk/config/libc/eglibc.in |    2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
Original patch from: gentoo/src/patchsets/glibc/2.9/6120_all_ppc-glibc-2.9-atomic.patch

-= BEGIN original header =-
-= END original header =-
diff -durN glibc-2_9.orig/sysdeps/powerpc/bits/atomic.h glibc-2_9/sysdeps/powerpc/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/bits/atomic.h	2007-03-26 22:15:28.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/bits/atomic.h	2009-02-02 22:01:40.000000000 +0100
12 __typeof (*(mem)) __tmp; \
13 __typeof (mem) __memp = (mem); \
15 - "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
16 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
19 - " stwcx. %3,0,%1\n" \
20 + " stwcx. %3,%y1\n" \
22 "2: " __ARCH_ACQ_INSTR \
24 - : "b" (__memp), "r" (oldval), "r" (newval) \
25 + : "=&r" (__tmp), "+Z" (*__memp) \
26 + : "r" (oldval), "r" (newval) \
31 __typeof (*(mem)) __tmp; \
32 __typeof (mem) __memp = (mem); \
33 __asm __volatile (__ARCH_REL_INSTR "\n" \
34 - "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n" \
35 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
38 - " stwcx. %3,0,%1\n" \
39 + " stwcx. %3,%y1\n" \
43 - : "b" (__memp), "r" (oldval), "r" (newval) \
44 + : "=&r" (__tmp), "+Z" (__memp) \
45 + : "r" (oldval), "r" (newval) \
51 __typeof (*mem) __val; \
53 - "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
54 - " stwcx. %3,0,%2\n" \
55 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
56 + " stwcx. %2,%y1\n" \
58 " " __ARCH_ACQ_INSTR \
59 - : "=&r" (__val), "=m" (*mem) \
60 - : "b" (mem), "r" (value), "m" (*mem) \
61 + : "=&r" (__val), "+Z" (*mem) \
68 __typeof (*mem) __val; \
69 __asm __volatile (__ARCH_REL_INSTR "\n" \
70 - "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n" \
71 - " stwcx. %3,0,%2\n" \
72 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
73 + " stwcx. %2,%y1\n" \
75 - : "=&r" (__val), "=m" (*mem) \
76 - : "b" (mem), "r" (value), "m" (*mem) \
77 + : "=&r" (__val), "+Z" (*mem) \
83 #define __arch_atomic_exchange_and_add_32(mem, value) \
85 __typeof (*mem) __val, __tmp; \
86 - __asm __volatile ("1: lwarx %0,0,%3\n" \
88 - " stwcx. %1,0,%3\n" \
89 + __asm __volatile ("1: lwarx %0,%y2\n" \
91 + " stwcx. %1,%y2\n" \
93 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
94 - : "b" (mem), "r" (value), "m" (*mem) \
95 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem) \
@@ -157,12 +157,12 @@
101 #define __arch_atomic_increment_val_32(mem) \
103 __typeof (*(mem)) __val; \
104 - __asm __volatile ("1: lwarx %0,0,%2\n" \
105 + __asm __volatile ("1: lwarx %0,%y1\n" \
107 - " stwcx. %0,0,%2\n" \
108 + " stwcx. %0,%y1\n" \
110 - : "=&b" (__val), "=m" (*mem) \
111 - : "b" (mem), "m" (*mem) \
112 + : "=&b" (__val), "+Z" (*mem) \
114 : "cr0", "memory"); \
@@ -170,27 +170,27 @@
118 #define __arch_atomic_decrement_val_32(mem) \
120 __typeof (*(mem)) __val; \
121 - __asm __volatile ("1: lwarx %0,0,%2\n" \
122 + __asm __volatile ("1: lwarx %0,%y1\n" \
124 - " stwcx. %0,0,%2\n" \
125 + " stwcx. %0,%y1\n" \
127 - : "=&b" (__val), "=m" (*mem) \
128 - : "b" (mem), "m" (*mem) \
129 + : "=&b" (__val), "+Z" (*mem) \
131 : "cr0", "memory"); \
135 #define __arch_atomic_decrement_if_positive_32(mem) \
136 ({ int __val, __tmp; \
137 - __asm __volatile ("1: lwarx %0,0,%3\n" \
138 + __asm __volatile ("1: lwarx %0,%y2\n" \
142 - " stwcx. %1,0,%3\n" \
143 + " stwcx. %1,%y2\n" \
145 "2: " __ARCH_ACQ_INSTR \
146 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
147 - : "b" (mem), "m" (*mem) \
148 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem) \
150 : "cr0", "memory"); \
diff -durN glibc-2_9.orig/sysdeps/powerpc/powerpc32/bits/atomic.h glibc-2_9/sysdeps/powerpc/powerpc32/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/powerpc32/bits/atomic.h	2007-03-26 22:15:45.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/powerpc32/bits/atomic.h	2009-02-02 22:01:40.000000000 +0100
158 unsigned int __tmp; \
160 - "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
161 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
162 " subf. %0,%2,%0\n" \
164 - " stwcx. %3,0,%1\n" \
165 + " stwcx. %3,%y1\n" \
167 "2: " __ARCH_ACQ_INSTR \
169 - : "b" (mem), "r" (oldval), "r" (newval) \
170 + : "=&r" (__tmp), "+Z" (*(mem)) \
171 + : "r" (oldval), "r" (newval) \
172 : "cr0", "memory"); \
177 unsigned int __tmp; \
178 __asm __volatile (__ARCH_REL_INSTR "\n" \
179 - "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n" \
180 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
181 " subf. %0,%2,%0\n" \
183 - " stwcx. %3,0,%1\n" \
184 + " stwcx. %3,%y1\n" \
188 - : "b" (mem), "r" (oldval), "r" (newval) \
189 + : "=&r" (__tmp), "+Z" (*(mem)) \
190 + : "r" (oldval), "r" (newval) \
191 : "cr0", "memory"); \
diff -durN glibc-2_9.orig/sysdeps/powerpc/powerpc64/bits/atomic.h glibc-2_9/sysdeps/powerpc/powerpc64/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/powerpc64/bits/atomic.h	2007-03-26 22:16:03.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/powerpc64/bits/atomic.h	2009-02-02 22:01:40.000000000 +0100
199 unsigned int __tmp, __tmp2; \
200 __asm __volatile (" clrldi %1,%1,32\n" \
201 - "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
202 + "1: lwarx %0,%y2" MUTEX_HINT_ACQ "\n" \
203 " subf. %0,%1,%0\n" \
205 - " stwcx. %4,0,%2\n" \
206 + " stwcx. %4,%y2\n" \
208 "2: " __ARCH_ACQ_INSTR \
209 - : "=&r" (__tmp), "=r" (__tmp2) \
210 - : "b" (mem), "1" (oldval), "r" (newval) \
211 + : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem)) \
212 + : "1" (oldval), "r" (newval) \
213 : "cr0", "memory"); \
217 unsigned int __tmp, __tmp2; \
218 __asm __volatile (__ARCH_REL_INSTR "\n" \
219 " clrldi %1,%1,32\n" \
220 - "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n" \
221 + "1: lwarx %0,%y2" MUTEX_HINT_REL "\n" \
222 " subf. %0,%1,%0\n" \
224 - " stwcx. %4,0,%2\n" \
225 + " stwcx. %4,%y2\n" \
228 - : "=&r" (__tmp), "=r" (__tmp2) \
229 - : "b" (mem), "1" (oldval), "r" (newval) \
230 + : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem)) \
231 + : "1" (oldval), "r" (newval) \
232 : "cr0", "memory"); \
237 unsigned long __tmp; \
239 - "1: ldarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
240 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
241 " subf. %0,%2,%0\n" \
243 - " stdcx. %3,0,%1\n" \
244 + " stdcx. %3,%y1\n" \
246 "2: " __ARCH_ACQ_INSTR \
248 - : "b" (mem), "r" (oldval), "r" (newval) \
249 + : "=&r" (__tmp), "+Z" (*(mem)) \
250 + : "r" (oldval), "r" (newval) \
251 : "cr0", "memory"); \
256 unsigned long __tmp; \
257 __asm __volatile (__ARCH_REL_INSTR "\n" \
258 - "1: ldarx %0,0,%2" MUTEX_HINT_REL "\n" \
259 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
260 " subf. %0,%2,%0\n" \
262 - " stdcx. %3,0,%1\n" \
263 + " stdcx. %3,%y1\n" \
267 - : "b" (mem), "r" (oldval), "r" (newval) \
268 + : "=&r" (__tmp), "+Z" (*(mem)) \
269 + : "r" (oldval), "r" (newval) \
270 : "cr0", "memory"); \
@@ -115,14 +115,14 @@
274 __typeof (*(mem)) __tmp; \
275 __typeof (mem) __memp = (mem); \
277 - "1: ldarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
278 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
281 - " stdcx. %3,0,%1\n" \
282 + " stdcx. %3,%y1\n" \
284 "2: " __ARCH_ACQ_INSTR \
286 - : "b" (__memp), "r" (oldval), "r" (newval) \
287 + : "=&r" (__tmp), "+Z" (*__memp) \
288 + : "r" (oldval), "r" (newval) \
289 : "cr0", "memory"); \
@@ -132,14 +132,14 @@
293 __typeof (*(mem)) __tmp; \
294 __typeof (mem) __memp = (mem); \
295 __asm __volatile (__ARCH_REL_INSTR "\n" \
296 - "1: ldarx %0,0,%1" MUTEX_HINT_REL "\n" \
297 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
300 - " stdcx. %3,0,%1\n" \
301 + " stdcx. %3,%y1\n" \
305 - : "b" (__memp), "r" (oldval), "r" (newval) \
306 + : "=&r" (__tmp), "+Z" (*__memp) \
307 + : "r" (oldval), "r" (newval) \
308 : "cr0", "memory"); \
@@ -148,12 +148,12 @@
313 __typeof (*mem) __val; \
314 __asm __volatile (__ARCH_REL_INSTR "\n" \
315 - "1: ldarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
316 - " stdcx. %3,0,%2\n" \
317 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
318 + " stdcx. %2,%y1\n" \
320 " " __ARCH_ACQ_INSTR \
321 - : "=&r" (__val), "=m" (*mem) \
322 - : "b" (mem), "r" (value), "m" (*mem) \
323 + : "=&r" (__val), "+Z" (*(mem)) \
325 : "cr0", "memory"); \
@@ -162,11 +162,11 @@
330 __typeof (*mem) __val; \
331 __asm __volatile (__ARCH_REL_INSTR "\n" \
332 - "1: ldarx %0,0,%2" MUTEX_HINT_REL "\n" \
333 - " stdcx. %3,0,%2\n" \
334 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
335 + " stdcx. %2,%y1\n" \
337 - : "=&r" (__val), "=m" (*mem) \
338 - : "b" (mem), "r" (value), "m" (*mem) \
339 + : "=&r" (__val), "+Z" (*(mem)) \
341 : "cr0", "memory"); \
@@ -174,12 +174,12 @@
345 #define __arch_atomic_exchange_and_add_64(mem, value) \
347 __typeof (*mem) __val, __tmp; \
348 - __asm __volatile ("1: ldarx %0,0,%3\n" \
349 - " add %1,%0,%4\n" \
350 - " stdcx. %1,0,%3\n" \
351 + __asm __volatile ("1: ldarx %0,%y2\n" \
352 + " add %1,%0,%3\n" \
353 + " stdcx. %1,%y2\n" \
355 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
356 - : "b" (mem), "r" (value), "m" (*mem) \
357 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem)) \
359 : "cr0", "memory"); \
@@ -187,12 +187,12 @@
363 #define __arch_atomic_increment_val_64(mem) \
365 __typeof (*(mem)) __val; \
366 - __asm __volatile ("1: ldarx %0,0,%2\n" \
367 + __asm __volatile ("1: ldarx %0,%y1\n" \
369 - " stdcx. %0,0,%2\n" \
370 + " stdcx. %0,%y1\n" \
372 - : "=&b" (__val), "=m" (*mem) \
373 - : "b" (mem), "m" (*mem) \
374 + : "=&b" (__val), "+Z" (*(mem)) \
376 : "cr0", "memory"); \
@@ -200,27 +200,27 @@
380 #define __arch_atomic_decrement_val_64(mem) \
382 __typeof (*(mem)) __val; \
383 - __asm __volatile ("1: ldarx %0,0,%2\n" \
384 + __asm __volatile ("1: ldarx %0,%y1\n" \
386 - " stdcx. %0,0,%2\n" \
387 + " stdcx. %0,%y1\n" \
389 - : "=&b" (__val), "=m" (*mem) \
390 - : "b" (mem), "m" (*mem) \
391 + : "=&b" (__val), "+Z" (*(mem)) \
393 : "cr0", "memory"); \
397 #define __arch_atomic_decrement_if_positive_64(mem) \
398 ({ int __val, __tmp; \
399 - __asm __volatile ("1: ldarx %0,0,%3\n" \
400 + __asm __volatile ("1: ldarx %0,%y2\n" \
404 - " stdcx. %1,0,%3\n" \
405 + " stdcx. %1,%y2\n" \
407 "2: " __ARCH_ACQ_INSTR \
408 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
409 - : "b" (mem), "m" (*mem) \
410 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem)) \
412 : "cr0", "memory"); \