/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */
10 #include <linux/atomic.h>
11 #include <linux/spinlock.h>
12 #include <linux/module.h>
#ifdef CONFIG_SMP
/*
 * Hash each atomic variable's address onto a small array of spinlocks.
 * Dropping the low 8 bits before hashing keeps nearby objects (which tend
 * to share cache lines) on the same lock while spreading unrelated ones.
 */
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/*
 * On UP a single dummy lock suffices: disabling interrupts around the
 * critical section is all the serialization that is needed.
 */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)	(&dummy)

#endif /* SMP */
/*
 * Generate atomic_fetch_<op>(): apply "v->counter c_op i" under the hashed
 * spinlock (with interrupts disabled) and return the value the counter
 * held *before* the operation.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
int atomic_fetch_##op(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = v->counter;						\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_fetch_##op);
/*
 * Generate atomic_<op>_return(): apply "v->counter c_op i" under the hashed
 * spinlock and return the *new* value of the counter (contrast with
 * ATOMIC_FETCH_OP, which returns the old value).
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);
/* Only add needs the new-value-returning form; fetch_* covers the rest. */
ATOMIC_OP_RETURN(add, +=)
/* Instantiate the fetch-old-value variants for add/and/or/xor. */
ATOMIC_FETCH_OP(add, +=)
ATOMIC_FETCH_OP(and, &=)
ATOMIC_FETCH_OP(or, |=)
ATOMIC_FETCH_OP(xor, ^=)
/* The generator macros are not needed past this point. */
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
69 int atomic_xchg(atomic_t *v, int new)
74 spin_lock_irqsave(ATOMIC_HASH(v), flags);
77 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
80 EXPORT_SYMBOL(atomic_xchg);
82 int atomic_cmpxchg(atomic_t *v, int old, int new)
87 spin_lock_irqsave(ATOMIC_HASH(v), flags);
89 if (likely(ret == old))
92 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
95 EXPORT_SYMBOL(atomic_cmpxchg);
97 int __atomic_add_unless(atomic_t *v, int a, int u)
102 spin_lock_irqsave(ATOMIC_HASH(v), flags);
106 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
109 EXPORT_SYMBOL(__atomic_add_unless);
111 /* Atomic operations are already serializing */
112 void atomic_set(atomic_t *v, int i)
116 spin_lock_irqsave(ATOMIC_HASH(v), flags);
118 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
120 EXPORT_SYMBOL(atomic_set);
/*
 * Atomically OR @mask into *addr. Returns the old value masked with @mask,
 * i.e. non-zero iff the bit(s) were already set (test_and_set semantics).
 */
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___set_bit);
/*
 * Atomically clear the @mask bits in *addr. Returns the old value masked
 * with @mask, i.e. non-zero iff the bit(s) were previously set
 * (test_and_clear semantics).
 */
unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___clear_bit);
/*
 * Atomically toggle the @mask bits in *addr. Returns the old value masked
 * with @mask (test_and_change semantics).
 */
unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___change_bit);
161 unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
166 spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
167 if ((prev = *ptr) == old)
169 spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
171 return (unsigned long)prev;
173 EXPORT_SYMBOL(__cmpxchg_u32);
175 unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
180 spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
183 spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
185 return (unsigned long)prev;
187 EXPORT_SYMBOL(__xchg_u32);