Remove CT_BROKEN.
Only one component (libelf) is actually BROKEN; make it depend on EXPERIMENTAL, but state BROKEN in the prompt.
ltrace (which depends on libelf) is now marked as EXPERIMENTAL, with a reference to the BROKEN libelf in its help entry.
/trunk/docs/overview.txt | 6 0 6 0 ------
/trunk/config/debug/ltrace.in | 7 4 3 0 ++++---
/trunk/config/global/ct-behave.in | 8 0 8 0 --------
/trunk/config/tools/libelf.in | 5 3 2 0 +++--
4 files changed, 7 insertions(+), 19 deletions(-)
1 Index: uClibc/libc/sysdeps/linux/mips/bits/atomic.h
2 ===================================================================
3 --- uClibc/libc/sysdeps/linux/mips/bits/atomic.h (revision 23002)
4 +++ uClibc/libc/sysdeps/linux/mips/bits/atomic.h (working copy)
6 /* For all "bool" routines, we return FALSE if exchange succesful. */
8 #define __arch_compare_and_exchange_bool_8_int(mem, new, old, rel, acq) \
9 -({ typeof (*mem) __prev; int __cmp; \
10 +({ __typeof (*mem) __prev; int __cmp; \
11 __arch_compare_and_exchange_xxx_8_int(mem, new, old, rel, acq); \
14 #define __arch_compare_and_exchange_bool_16_int(mem, new, old, rel, acq) \
15 -({ typeof (*mem) __prev; int __cmp; \
16 +({ __typeof (*mem) __prev; int __cmp; \
17 __arch_compare_and_exchange_xxx_16_int(mem, new, old, rel, acq); \
20 #define __arch_compare_and_exchange_bool_32_int(mem, new, old, rel, acq) \
21 -({ typeof (*mem) __prev; int __cmp; \
22 +({ __typeof (*mem) __prev; int __cmp; \
23 __arch_compare_and_exchange_xxx_32_int(mem, new, old, rel, acq); \
26 #define __arch_compare_and_exchange_bool_64_int(mem, new, old, rel, acq) \
27 -({ typeof (*mem) __prev; int __cmp; \
28 +({ __typeof (*mem) __prev; int __cmp; \
29 __arch_compare_and_exchange_xxx_64_int(mem, new, old, rel, acq); \
35 #define __arch_compare_and_exchange_val_8_int(mem, new, old, rel, acq) \
36 -({ typeof (*mem) __prev; int __cmp; \
37 +({ __typeof (*mem) __prev; int __cmp; \
38 __arch_compare_and_exchange_xxx_8_int(mem, new, old, rel, acq); \
39 - (typeof (*mem))__prev; })
40 + (__typeof (*mem))__prev; })
42 #define __arch_compare_and_exchange_val_16_int(mem, new, old, rel, acq) \
43 -({ typeof (*mem) __prev; int __cmp; \
44 +({ __typeof (*mem) __prev; int __cmp; \
45 __arch_compare_and_exchange_xxx_16_int(mem, new, old, rel, acq); \
46 - (typeof (*mem))__prev; })
47 + (__typeof (*mem))__prev; })
49 #define __arch_compare_and_exchange_val_32_int(mem, new, old, rel, acq) \
50 -({ typeof (*mem) __prev; int __cmp; \
51 +({ __typeof (*mem) __prev; int __cmp; \
52 __arch_compare_and_exchange_xxx_32_int(mem, new, old, rel, acq); \
53 - (typeof (*mem))__prev; })
54 + (__typeof (*mem))__prev; })
56 #define __arch_compare_and_exchange_val_64_int(mem, new, old, rel, acq) \
57 -({ typeof (*mem) __prev; int __cmp; \
58 +({ __typeof (*mem) __prev; int __cmp; \
59 __arch_compare_and_exchange_xxx_64_int(mem, new, old, rel, acq); \
60 - (typeof (*mem))__prev; })
61 + (__typeof (*mem))__prev; })
63 /* Compare and exchange with "acquire" semantics, ie barrier after. */
68 #define __arch_exchange_xxx_32_int(mem, newval, rel, acq) \
69 -({ typeof (*mem) __prev; int __cmp; \
70 +({ __typeof (*mem) __prev; int __cmp; \
71 __asm__ __volatile__ ("\n" \
77 #define __arch_exchange_xxx_64_int(mem, newval, rel, acq) \
78 -({ typeof (*mem) __prev; int __cmp; \
79 +({ __typeof (*mem) __prev; int __cmp; \
80 __asm__ __volatile__ ("\n" \
84 /* Atomically add value and return the previous (unincremented) value. */
86 #define __arch_exchange_and_add_8_int(mem, newval, rel, acq) \
87 - (abort (), (typeof(*mem)) 0)
88 + (abort (), (__typeof(*mem)) 0)
90 #define __arch_exchange_and_add_16_int(mem, newval, rel, acq) \
91 - (abort (), (typeof(*mem)) 0)
92 + (abort (), (__typeof(*mem)) 0)
94 #define __arch_exchange_and_add_32_int(mem, value, rel, acq) \
95 -({ typeof (*mem) __prev; int __cmp; \
96 +({ __typeof (*mem) __prev; int __cmp; \
97 __asm__ __volatile__ ("\n" \
100 @@ -263,10 +263,10 @@
101 #if _MIPS_SIM == _ABIO32
102 /* We can't do an atomic 64-bit operation in O32. */
103 #define __arch_exchange_and_add_64_int(mem, value, rel, acq) \
104 - (abort (), (typeof(*mem)) 0)
105 + (abort (), (__typeof(*mem)) 0)
107 #define __arch_exchange_and_add_64_int(mem, value, rel, acq) \
108 -({ typeof (*mem) __prev; int __cmp; \
109 +({ __typeof (*mem) __prev; int __cmp; \
110 __asm__ __volatile__ ( \