#define __BITOPS_AND "nr"
#define __BITOPS_XOR "xr"
-#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
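+/*
+ * CS retry loop as a statement expression: load the old value, apply
+ * the operation to a copy and compare-and-swap it back, retrying until
+ * no other CPU has modified the word in between; evaluates to __old.
+ */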
+#define __BITOPS_LOOP(__addr, __val, __op_string) \
+({ \
+ unsigned long __old, __new; \
+ \
asm volatile( \
" l %0,%2\n" \
"0: lr %1,%0\n" \
: "=&d" (__old), "=&d" (__new), \
"=Q" (*(unsigned long *) __addr) \
: "d" (__val), "Q" (*(unsigned long *) __addr) \
- : "cc");
+ : "cc"); \
+ __old; \
+})
#else /* CONFIG_64BIT */
+#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
+
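+/*
+ * With the interlocked-access facility (z196 and newer) the
+ * load-and-or/and/xor instructions update the word and return its old
+ * value in a single atomic instruction, so no compare-and-swap retry
+ * loop is needed.
+ */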
+#define __BITOPS_OR "laog"
+#define __BITOPS_AND "lang"
+#define __BITOPS_XOR "laxg"
+
+#define __BITOPS_LOOP(__addr, __val, __op_string) \
+({ \
+ unsigned long __old; \
+ \
+ asm volatile( \
+ __op_string " %0,%2,%1\n" \
+ : "=d" (__old), "+Q" (*(unsigned long *)__addr) \
+ : "d" (__val) \
+ : "cc"); \
+ __old; \
+})
+
+#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+
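+/*
+ * Without the interlocked-access facility, fall back to a
+ * compare-and-swap (csg) retry loop.
+ */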
#define __BITOPS_OR "ogr"
#define __BITOPS_AND "ngr"
#define __BITOPS_XOR "xgr"
-#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string) \
+#define __BITOPS_LOOP(__addr, __val, __op_string) \
+({ \
+ unsigned long __old, __new; \
+ \
asm volatile( \
" lg %0,%2\n" \
"0: lgr %1,%0\n" \
: "=&d" (__old), "=&d" (__new), \
"=Q" (*(unsigned long *) __addr) \
: "d" (__val), "Q" (*(unsigned long *) __addr) \
- : "cc");
+ : "cc"); \
+ __old; \
+})
+
+#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
#endif /* CONFIG_64BIT */
/*
 * SMP safe set_bit routine based on compare and swap (CS)
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
- unsigned long addr, old, new, mask;
+ unsigned long addr, mask;
addr = (unsigned long) ptr;
/* calculate address for CS */
addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
/* make OR mask */
mask = 1UL << (nr & (BITS_PER_LONG - 1));
/* Do the atomic update. */
- __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
+ __BITOPS_LOOP(addr, mask, __BITOPS_OR);
}
/*
 * SMP safe clear_bit routine based on compare and swap (CS)
 */
static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
- unsigned long addr, old, new, mask;
+ unsigned long addr, mask;
addr = (unsigned long) ptr;
/* calculate address for CS */
addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
/* make AND mask */
mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
/* Do the atomic update. */
- __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
+ __BITOPS_LOOP(addr, mask, __BITOPS_AND);
}
/*
 * SMP safe change_bit routine based on compare and swap (CS)
 */
static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
- unsigned long addr, old, new, mask;
+ unsigned long addr, mask;
addr = (unsigned long) ptr;
/* calculate address for CS */
addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
/* make XOR mask */
mask = 1UL << (nr & (BITS_PER_LONG - 1));
/* Do the atomic update. */
- __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
+ __BITOPS_LOOP(addr, mask, __BITOPS_XOR);
}
/*
 * SMP safe test_and_set_bit routine based on compare and swap (CS)
 */
static inline int
test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
- unsigned long addr, old, new, mask;
+ unsigned long addr, old, mask;
addr = (unsigned long) ptr;
/* calculate address for CS */
addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
/* make OR/test mask */
mask = 1UL << (nr & (BITS_PER_LONG - 1));
/* Do the atomic update. */
- __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
+ old = __BITOPS_LOOP(addr, mask, __BITOPS_OR);
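+ /* barrier() keeps the compiler from reordering around the atomic update */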
barrier();
return (old & mask) != 0;
}
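/*
 * SMP safe test_and_clear_bit routine based on compare and swap (CS)
 */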
static inline int
test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
- unsigned long addr, old, new, mask;
+ unsigned long addr, old, mask;
addr = (unsigned long) ptr;
/* calculate address for CS */
addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
/* make AND/test mask */
mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
/* Do the atomic update. */
- __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
+ old = __BITOPS_LOOP(addr, mask, __BITOPS_AND);
barrier();
- return (old ^ new) != 0;
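+ /* mask has the tested bit cleared, so ~mask isolates that bit in old */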
+ return (old & ~mask) != 0;
}
/*
 * SMP safe test_and_change_bit routine based on compare and swap (CS)
 */
static inline int
test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
- unsigned long addr, old, new, mask;
+ unsigned long addr, old, mask;
addr = (unsigned long) ptr;
/* calculate address for CS */
addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
/* make XOR/test mask */
mask = 1UL << (nr & (BITS_PER_LONG - 1));
/* Do the atomic update. */
- __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
+ old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR);
barrier();
return (old & mask) != 0;
}