3 Index: sysdeps/powerpc/bits/atomic.h
4 ===================================================================
5 RCS file: /cvs/glibc/libc/sysdeps/powerpc/bits/atomic.h,v
6 retrieving revision 1.17
7 diff -u -a -p -r1.17 atomic.h
9 diff -durN glibc-2.12.1.orig/sysdeps/powerpc/bits/atomic.h glibc-2.12.1/sysdeps/powerpc/bits/atomic.h
10 --- glibc-2.12.1.orig/sysdeps/powerpc/bits/atomic.h 2007-03-26 22:15:28.000000000 +0200
11 +++ glibc-2.12.1/sysdeps/powerpc/bits/atomic.h 2009-11-13 00:51:19.000000000 +0100
13 __typeof (*(mem)) __tmp; \
14 __typeof (mem) __memp = (mem); \
16 - "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
17 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
20 - " stwcx. %3,0,%1\n" \
21 + " stwcx. %3,%y1\n" \
23 "2: " __ARCH_ACQ_INSTR \
25 - : "b" (__memp), "r" (oldval), "r" (newval) \
26 + : "=&r" (__tmp), "+Z" (*__memp) \
27 + : "r" (oldval), "r" (newval) \
32 __typeof (*(mem)) __tmp; \
33 __typeof (mem) __memp = (mem); \
34 __asm __volatile (__ARCH_REL_INSTR "\n" \
35 - "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n" \
36 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
39 - " stwcx. %3,0,%1\n" \
40 + " stwcx. %3,%y1\n" \
44 - : "b" (__memp), "r" (oldval), "r" (newval) \
45 + : "=&r" (__tmp), "+Z" (*__memp) \
46 + : "r" (oldval), "r" (newval) \
52 __typeof (*mem) __val; \
54 - "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
55 - " stwcx. %3,0,%2\n" \
56 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
57 + " stwcx. %2,%y1\n" \
59 " " __ARCH_ACQ_INSTR \
60 - : "=&r" (__val), "=m" (*mem) \
61 - : "b" (mem), "r" (value), "m" (*mem) \
62 + : "=&r" (__val), "+Z" (*mem) \
69 __typeof (*mem) __val; \
70 __asm __volatile (__ARCH_REL_INSTR "\n" \
71 - "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n" \
72 - " stwcx. %3,0,%2\n" \
73 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
74 + " stwcx. %2,%y1\n" \
76 - : "=&r" (__val), "=m" (*mem) \
77 - : "b" (mem), "r" (value), "m" (*mem) \
78 + : "=&r" (__val), "+Z" (*mem) \
84 #define __arch_atomic_exchange_and_add_32(mem, value) \
86 __typeof (*mem) __val, __tmp; \
87 - __asm __volatile ("1: lwarx %0,0,%3\n" \
89 - " stwcx. %1,0,%3\n" \
90 + __asm __volatile ("1: lwarx %0,%y2\n" \
92 + " stwcx. %1,%y2\n" \
94 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
95 - : "b" (mem), "r" (value), "m" (*mem) \
96 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem) \
101 @@ -157,12 +157,12 @@
102 #define __arch_atomic_increment_val_32(mem) \
104 __typeof (*(mem)) __val; \
105 - __asm __volatile ("1: lwarx %0,0,%2\n" \
106 + __asm __volatile ("1: lwarx %0,%y1\n" \
108 - " stwcx. %0,0,%2\n" \
109 + " stwcx. %0,%y1\n" \
111 - : "=&b" (__val), "=m" (*mem) \
112 - : "b" (mem), "m" (*mem) \
113 + : "=&b" (__val), "+Z" (*mem) \
115 : "cr0", "memory"); \
118 @@ -170,27 +170,27 @@
119 #define __arch_atomic_decrement_val_32(mem) \
121 __typeof (*(mem)) __val; \
122 - __asm __volatile ("1: lwarx %0,0,%2\n" \
123 + __asm __volatile ("1: lwarx %0,%y1\n" \
125 - " stwcx. %0,0,%2\n" \
126 + " stwcx. %0,%y1\n" \
128 - : "=&b" (__val), "=m" (*mem) \
129 - : "b" (mem), "m" (*mem) \
130 + : "=&b" (__val), "+Z" (*mem) \
132 : "cr0", "memory"); \
136 #define __arch_atomic_decrement_if_positive_32(mem) \
137 ({ int __val, __tmp; \
138 - __asm __volatile ("1: lwarx %0,0,%3\n" \
139 + __asm __volatile ("1: lwarx %0,%y2\n" \
143 - " stwcx. %1,0,%3\n" \
144 + " stwcx. %1,%y2\n" \
146 "2: " __ARCH_ACQ_INSTR \
147 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
148 - : "b" (mem), "m" (*mem) \
149 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem) \
151 : "cr0", "memory"); \
154 diff -durN glibc-2.12.1.orig/sysdeps/powerpc/powerpc32/bits/atomic.h glibc-2.12.1/sysdeps/powerpc/powerpc32/bits/atomic.h
155 --- glibc-2.12.1.orig/sysdeps/powerpc/powerpc32/bits/atomic.h 2007-03-26 22:15:45.000000000 +0200
156 +++ glibc-2.12.1/sysdeps/powerpc/powerpc32/bits/atomic.h 2009-11-13 00:51:19.000000000 +0100
159 unsigned int __tmp; \
161 - "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
162 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
163 " subf. %0,%2,%0\n" \
165 - " stwcx. %3,0,%1\n" \
166 + " stwcx. %3,%y1\n" \
168 "2: " __ARCH_ACQ_INSTR \
170 - : "b" (mem), "r" (oldval), "r" (newval) \
171 + : "=&r" (__tmp), "+Z" (*(mem)) \
172 + : "r" (oldval), "r" (newval) \
173 : "cr0", "memory"); \
178 unsigned int __tmp; \
179 __asm __volatile (__ARCH_REL_INSTR "\n" \
180 - "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n" \
181 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
182 " subf. %0,%2,%0\n" \
184 - " stwcx. %3,0,%1\n" \
185 + " stwcx. %3,%y1\n" \
189 - : "b" (mem), "r" (oldval), "r" (newval) \
190 + : "=&r" (__tmp), "+Z" (*(mem)) \
191 + : "r" (oldval), "r" (newval) \
192 : "cr0", "memory"); \
195 diff -durN glibc-2.12.1.orig/sysdeps/powerpc/powerpc64/bits/atomic.h glibc-2.12.1/sysdeps/powerpc/powerpc64/bits/atomic.h
196 --- glibc-2.12.1.orig/sysdeps/powerpc/powerpc64/bits/atomic.h 2007-03-26 22:16:03.000000000 +0200
197 +++ glibc-2.12.1/sysdeps/powerpc/powerpc64/bits/atomic.h 2009-11-13 00:51:19.000000000 +0100
200 unsigned int __tmp, __tmp2; \
201 __asm __volatile (" clrldi %1,%1,32\n" \
202 - "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
203 + "1: lwarx %0,%y2" MUTEX_HINT_ACQ "\n" \
204 " subf. %0,%1,%0\n" \
206 - " stwcx. %4,0,%2\n" \
207 + " stwcx. %4,%y2\n" \
209 "2: " __ARCH_ACQ_INSTR \
210 - : "=&r" (__tmp), "=r" (__tmp2) \
211 - : "b" (mem), "1" (oldval), "r" (newval) \
212 + : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem)) \
213 + : "1" (oldval), "r" (newval) \
214 : "cr0", "memory"); \
218 unsigned int __tmp, __tmp2; \
219 __asm __volatile (__ARCH_REL_INSTR "\n" \
220 " clrldi %1,%1,32\n" \
221 - "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n" \
222 + "1: lwarx %0,%y2" MUTEX_HINT_REL "\n" \
223 " subf. %0,%1,%0\n" \
225 - " stwcx. %4,0,%2\n" \
226 + " stwcx. %4,%y2\n" \
229 - : "=&r" (__tmp), "=r" (__tmp2) \
230 - : "b" (mem), "1" (oldval), "r" (newval) \
231 + : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem)) \
232 + : "1" (oldval), "r" (newval) \
233 : "cr0", "memory"); \
238 unsigned long __tmp; \
240 - "1: ldarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
241 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
242 " subf. %0,%2,%0\n" \
244 - " stdcx. %3,0,%1\n" \
245 + " stdcx. %3,%y1\n" \
247 "2: " __ARCH_ACQ_INSTR \
249 - : "b" (mem), "r" (oldval), "r" (newval) \
250 + : "=&r" (__tmp), "+Z" (*(mem)) \
251 + : "r" (oldval), "r" (newval) \
252 : "cr0", "memory"); \
257 unsigned long __tmp; \
258 __asm __volatile (__ARCH_REL_INSTR "\n" \
259 - "1: ldarx %0,0,%2" MUTEX_HINT_REL "\n" \
260 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
261 " subf. %0,%2,%0\n" \
263 - " stdcx. %3,0,%1\n" \
264 + " stdcx. %3,%y1\n" \
268 - : "b" (mem), "r" (oldval), "r" (newval) \
269 + : "=&r" (__tmp), "+Z" (*(mem)) \
270 + : "r" (oldval), "r" (newval) \
271 : "cr0", "memory"); \
274 @@ -115,14 +115,14 @@
275 __typeof (*(mem)) __tmp; \
276 __typeof (mem) __memp = (mem); \
278 - "1: ldarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
279 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
282 - " stdcx. %3,0,%1\n" \
283 + " stdcx. %3,%y1\n" \
285 "2: " __ARCH_ACQ_INSTR \
287 - : "b" (__memp), "r" (oldval), "r" (newval) \
288 + : "=&r" (__tmp), "+Z" (*__memp) \
289 + : "r" (oldval), "r" (newval) \
290 : "cr0", "memory"); \
293 @@ -132,14 +132,14 @@
294 __typeof (*(mem)) __tmp; \
295 __typeof (mem) __memp = (mem); \
296 __asm __volatile (__ARCH_REL_INSTR "\n" \
297 - "1: ldarx %0,0,%1" MUTEX_HINT_REL "\n" \
298 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
301 - " stdcx. %3,0,%1\n" \
302 + " stdcx. %3,%y1\n" \
306 - : "b" (__memp), "r" (oldval), "r" (newval) \
307 + : "=&r" (__tmp), "+Z" (*__memp) \
308 + : "r" (oldval), "r" (newval) \
309 : "cr0", "memory"); \
312 @@ -148,12 +148,12 @@
314 __typeof (*mem) __val; \
315 __asm __volatile (__ARCH_REL_INSTR "\n" \
316 - "1: ldarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
317 - " stdcx. %3,0,%2\n" \
318 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
319 + " stdcx. %2,%y1\n" \
321 " " __ARCH_ACQ_INSTR \
322 - : "=&r" (__val), "=m" (*mem) \
323 - : "b" (mem), "r" (value), "m" (*mem) \
324 + : "=&r" (__val), "+Z" (*(mem)) \
326 : "cr0", "memory"); \
329 @@ -162,11 +162,11 @@
331 __typeof (*mem) __val; \
332 __asm __volatile (__ARCH_REL_INSTR "\n" \
333 - "1: ldarx %0,0,%2" MUTEX_HINT_REL "\n" \
334 - " stdcx. %3,0,%2\n" \
335 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
336 + " stdcx. %2,%y1\n" \
338 - : "=&r" (__val), "=m" (*mem) \
339 - : "b" (mem), "r" (value), "m" (*mem) \
340 + : "=&r" (__val), "+Z" (*(mem)) \
342 : "cr0", "memory"); \
345 @@ -174,12 +174,12 @@
346 #define __arch_atomic_exchange_and_add_64(mem, value) \
348 __typeof (*mem) __val, __tmp; \
349 - __asm __volatile ("1: ldarx %0,0,%3\n" \
350 - " add %1,%0,%4\n" \
351 - " stdcx. %1,0,%3\n" \
352 + __asm __volatile ("1: ldarx %0,%y2\n" \
353 + " add %1,%0,%3\n" \
354 + " stdcx. %1,%y2\n" \
356 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
357 - : "b" (mem), "r" (value), "m" (*mem) \
358 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem)) \
360 : "cr0", "memory"); \
363 @@ -187,12 +187,12 @@
364 #define __arch_atomic_increment_val_64(mem) \
366 __typeof (*(mem)) __val; \
367 - __asm __volatile ("1: ldarx %0,0,%2\n" \
368 + __asm __volatile ("1: ldarx %0,%y1\n" \
370 - " stdcx. %0,0,%2\n" \
371 + " stdcx. %0,%y1\n" \
373 - : "=&b" (__val), "=m" (*mem) \
374 - : "b" (mem), "m" (*mem) \
375 + : "=&b" (__val), "+Z" (*(mem)) \
377 : "cr0", "memory"); \
380 @@ -200,27 +200,27 @@
381 #define __arch_atomic_decrement_val_64(mem) \
383 __typeof (*(mem)) __val; \
384 - __asm __volatile ("1: ldarx %0,0,%2\n" \
385 + __asm __volatile ("1: ldarx %0,%y1\n" \
387 - " stdcx. %0,0,%2\n" \
388 + " stdcx. %0,%y1\n" \
390 - : "=&b" (__val), "=m" (*mem) \
391 - : "b" (mem), "m" (*mem) \
392 + : "=&b" (__val), "+Z" (*(mem)) \
394 : "cr0", "memory"); \
398 #define __arch_atomic_decrement_if_positive_64(mem) \
399 ({ int __val, __tmp; \
400 - __asm __volatile ("1: ldarx %0,0,%3\n" \
401 + __asm __volatile ("1: ldarx %0,%y2\n" \
405 - " stdcx. %1,0,%3\n" \
406 + " stdcx. %1,%y2\n" \
408 "2: " __ARCH_ACQ_INSTR \
409 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
410 - : "b" (mem), "m" (*mem) \
411 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem)) \
413 : "cr0", "memory"); \