/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifdef __KERNEL__

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>

/*
 * Hardware assisted read-modify-write using ARC700 LLOCK/SCOND insns.
 * The Kconfig glue ensures that in SMP, this is only set if the container
 * SoC/platform has cross-core coherent LLOCK/SCOND
 */
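/*
 * Illustrative sketch (not part of the original header): LLOCK/SCOND
 * implement the classic load-locked/store-conditional retry loop. In
 * plain C, the atomic set_bit() below is morally equivalent to this
 * compare-and-swap loop; llsc_set_bit_sketch is a hypothetical name and
 * the gcc __atomic builtins merely stand in for the ARC instructions.
 */
static inline void llsc_set_bit_sketch(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, new;

	m += nr >> 5;			/* select the 32-bit word holding bit @nr */
	do {				/* retry until the "store conditional" wins */
		old = *m;
		new = old | (1UL << (nr & 0x1f));
	} while (!__atomic_compare_exchange_n((unsigned long *)m, &old, new,
					      0, __ATOMIC_RELAXED, __ATOMIC_RELAXED));
}
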
#if defined(CONFIG_ARC_HAS_LLSC)

static inline void set_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	"1:	llock   %0, [%1]	\n"

static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	"1:	llock   %0, [%1]	\n"

static inline void change_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	"1:	llock   %0, [%1]	\n"

/*
 * Semantics of test_and_set_bit() and friends:
 *   if the bit was clear, set it and return 0 (old value)
 *   else return 1 (old value).
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set unconditionally
 * and the old value of the bit is returned.
 */
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long old, temp;
	if (__builtin_constant_p(nr))
	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bset    %1, %0, %3	\n"
	: "=&r"(old), "=&r"(temp)
	return (old & (1 << nr)) != 0;

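/*
 * Illustrative usage sketch (not part of the original header): the old
 * value returned above lets exactly one caller "win", e.g. to claim a
 * one-shot resource. example_try_claim and its use of bit 0 are
 * hypothetical.
 */
static inline int example_try_claim(volatile unsigned long *flags)
{
	/* non-zero only for the caller that actually flipped bit 0 from 0 to 1 */
	return !test_and_set_bit(0, flags);
}
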
static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
	unsigned int old, temp;
	if (__builtin_constant_p(nr))
	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bclr    %1, %0, %3	\n"
	: "=&r"(old), "=&r"(temp)
	return (old & (1 << nr)) != 0;

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
	unsigned int old, temp;
	if (__builtin_constant_p(nr))
	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bxor    %1, %0, %3	\n"
	: "=&r"(old), "=&r"(temp)
	return (old & (1 << nr)) != 0;

#else	/* !CONFIG_ARC_HAS_LLSC */

/*
 * Non hardware assisted Atomic-R-M-W
 * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
 *
 * There's "significant" micro-optimization in writing our own variants of
 * bitops (over generic variants)
 *
 * (1) The generic APIs have "signed" @nr while we have it "unsigned"
 *     This avoids extra code to be generated for pointer arithmetic, since
 *     the compiler is "not sure" that the index is NOT -ve
 * (2) Utilize the fact that ARCompact bit fiddling insns (BSET/BCLR/ASL) etc
 *     only consider bottom 5 bits of @nr, so NO need to mask them off
 *     (GCC quirk: however for constant @nr we still need to do the masking
 *     at compile time; illustrated in the sketch below)
 */
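/*
 * Illustrative sketch of point (2) above (not part of the original
 * header): BSET/BCLR ignore all but the bottom 5 bits of the bit
 * position, so the mask formed within the selected 32-bit word is the
 * one below; the explicit "& 0x1f" only matters when gcc folds a
 * constant @nr at compile time. word_mask_example is a hypothetical
 * helper.
 */
static inline unsigned long word_mask_example(unsigned long nr)
{
	return 1UL << (nr & 0x1f);	/* bit position within the 32-bit word */
}
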
static inline void set_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long temp, flags;
	if (__builtin_constant_p(nr))
	*m = temp | (1UL << nr);
	bitops_unlock(flags);

static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long temp, flags;
	if (__builtin_constant_p(nr))
	*m = temp & ~(1UL << nr);
	bitops_unlock(flags);

static inline void change_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long temp, flags;
	if (__builtin_constant_p(nr))
	*m = temp ^ (1UL << nr);
	bitops_unlock(flags);

static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long old, flags;
	if (__builtin_constant_p(nr))
	*m = old | (1 << nr);
	bitops_unlock(flags);
	return (old & (1 << nr)) != 0;

static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long old, flags;
	if (__builtin_constant_p(nr))
	*m = old & ~(1 << nr);
	bitops_unlock(flags);
	return (old & (1 << nr)) != 0;

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
	unsigned long old, flags;
	if (__builtin_constant_p(nr))
	*m = old ^ (1 << nr);
	bitops_unlock(flags);
	return (old & (1 << nr)) != 0;

#endif /* CONFIG_ARC_HAS_LLSC */

/***************************************
 * Non atomic variants
 **************************************/

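/*
 * Illustrative usage sketch (not part of the original header): the __xxx
 * variants below are not atomic and are intended for callers that already
 * serialise access, e.g. single-threaded initialisation of a bitmap.
 * init_bitmap_example is a hypothetical helper.
 */
static inline void init_bitmap_example(unsigned long *map, unsigned int nbits)
{
	unsigned int i;

	for (i = 0; i < nbits; i++)	/* no concurrency yet, so non-atomic is fine */
		__set_bit(i, map);
}
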
static inline void __set_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	*m = temp | (1UL << nr);

static inline void __clear_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	*m = temp & ~(1UL << nr);

static inline void __change_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	*m = temp ^ (1UL << nr);

static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	*m = old | (1 << nr);
	return (old & (1 << nr)) != 0;

static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	*m = old & ~(1 << nr);
	return (old & (1 << nr)) != 0;

static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *m)
	if (__builtin_constant_p(nr))
	*m = old ^ (1 << nr);
	return (old & (1 << nr)) != 0;

/*
 * This routine doesn't need to be atomic.
 */
static inline int
__constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & 31)) &
		(((const volatile unsigned int *)addr)[nr >> 5])) != 0;
}

static inline int
__test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	addr += nr >> 5;
	/* ARC700 only considers 5 bits in bit-fiddling insn */
	mask = 1 << nr;
	return ((mask & *addr) != 0);
}

#define test_bit(nr, addr)	(__builtin_constant_p(nr) ? \
				 __constant_test_bit((nr), (addr)) : \
				 __test_bit((nr), (addr)))

/*
 * Count the number of zeros, starting from MSB
 * Helper for fls( ) friends
 * This is a pure count, so (1-32) or (0-31) doesn't apply
 * It could be 0 to 32, based on num of 0's in there
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF) = 0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
	__asm__ __volatile__(
	"	add.p   %0, %0, 1	\n"

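/*
 * Illustrative sketch (not part of the original header): a portable C
 * cross-check of the clz() semantics documented above, e.g. clz(0) = 32,
 * clz(1) = 31, clz(0x80000000) = 0. clz_reference_example is a
 * hypothetical name.
 */
static inline int clz_reference_example(unsigned int x)
{
	int n = 32;

	while (x) {		/* runs once per bit position up to the highest set bit */
		x >>= 1;
		n--;
	}
	return n;
}
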
static inline int constant_fls(int x)
	if (!(x & 0xffff0000u)) {
	if (!(x & 0xff000000u)) {
	if (!(x & 0xf0000000u)) {
	if (!(x & 0xc0000000u)) {
	if (!(x & 0x80000000u)) {

/*
 * fls = Find Last Set in word
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	if (__builtin_constant_p(x))
		return constant_fls(x);

	return 32 - clz(x);
}

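/*
 * Worked example (not part of the original header): for x = 0x80000000,
 * clz(x) = 0 so fls(x) = 32 - 0 = 32; for x = 0, clz(0) = 32 gives
 * fls(0) = 32 - 32 = 0, matching the corner cases documented above.
 */
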
/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })

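/*
 * Worked example (not part of the original header): __t & -__t isolates
 * the lowest set bit, e.g. for x = 0b01100, x & -x = 0b00100, so
 * ffs(0b01100) = fls(0b00100) = 3.
 */
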
/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long word)
	return ffs(word) - 1;

/*
 * ffz = Find First Zero in word.
 * @return: [0-31], 32 if all 1's
 */
#define ffz(x)	__ffs(~(x))

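/*
 * Worked example (not part of the original header): ffz(0xFF) inverts to
 * 0xFFFFFF00, whose lowest set bit is bit 8, so __ffs() returns 8, i.e.
 * the position of the first zero bit in 0xFF.
 */
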
/* TODO does this affect uni-processor code */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* !__ASSEMBLY__ */

#endif /* __KERNEL__ */