/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */
10 #include <linux/atomic.h>
11 #include <linux/spinlock.h>
12 #include <linux/module.h>
#ifdef CONFIG_SMP
/*
 * Emulate atomic operations with a small hash table of spinlocks,
 * indexed by the address being operated on.  Hashing avoids one
 * global lock while still serializing every operation on a given
 * word.  (>>8 skips the low address bits so neighbouring words do
 * not all collide on the same bucket.)
 */
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/*
 * On UP a single dummy lock suffices: spin_lock_irqsave() degenerates
 * to local interrupt masking, which is all the serialization needed.
 */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */
30 #define ATOMIC_OP(op, cop) \
31 int atomic_##op##_return(int i, atomic_t *v) \
34 unsigned long flags; \
35 spin_lock_irqsave(ATOMIC_HASH(v), flags); \
37 ret = (v->counter cop i); \
39 spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \
42 EXPORT_SYMBOL(atomic_##op##_return);
48 int atomic_xchg(atomic_t *v, int new)
53 spin_lock_irqsave(ATOMIC_HASH(v), flags);
56 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
59 EXPORT_SYMBOL(atomic_xchg);
61 int atomic_cmpxchg(atomic_t *v, int old, int new)
66 spin_lock_irqsave(ATOMIC_HASH(v), flags);
68 if (likely(ret == old))
71 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
74 EXPORT_SYMBOL(atomic_cmpxchg);
76 int __atomic_add_unless(atomic_t *v, int a, int u)
81 spin_lock_irqsave(ATOMIC_HASH(v), flags);
85 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
88 EXPORT_SYMBOL(__atomic_add_unless);
90 /* Atomic operations are already serializing */
91 void atomic_set(atomic_t *v, int i)
95 spin_lock_irqsave(ATOMIC_HASH(v), flags);
97 spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
99 EXPORT_SYMBOL(atomic_set);
/*
 * ___set_bit - atomically OR @mask into *@addr.
 *
 * Returns the old word masked by @mask (non-zero iff any masked bit
 * was already set), which is what test_and_set_bit() callers need.
 */
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___set_bit);
/*
 * ___clear_bit - atomically clear the bits of @mask in *@addr.
 *
 * Returns the old word masked by @mask (non-zero iff any masked bit
 * was set before clearing).
 */
unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___clear_bit);
/*
 * ___change_bit - atomically toggle the bits of @mask in *@addr.
 *
 * Returns the old word masked by @mask (the pre-toggle state of the
 * affected bits).
 */
unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___change_bit);
140 unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
145 spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
146 if ((prev = *ptr) == old)
148 spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
150 return (unsigned long)prev;
152 EXPORT_SYMBOL(__cmpxchg_u32);
154 unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
159 spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
162 spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
164 return (unsigned long)prev;
166 EXPORT_SYMBOL(__xchg_u32);