#ifndef _ASM_WORD_AT_A_TIME_H
#define _ASM_WORD_AT_A_TIME_H

/*
 * Word-at-a-time interfaces for PowerPC.
 */

#include <linux/kernel.h>
#include <asm/asm-compat.h>

#ifdef __BIG_ENDIAN__

struct word_at_a_time {
	const unsigned long high_bits, low_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0xfe) + 1, REPEAT_BYTE(0x7f) }

/* Bit set in the bytes that have a zero */
static inline long prep_zero_mask(unsigned long val, unsigned long rhs, const struct word_at_a_time *c)
{
	unsigned long mask = (val & c->low_bits) + c->low_bits;
	return ~(mask | rhs);
}

#define create_zero_mask(mask) (mask)

static inline long find_zero(unsigned long mask)
{
	long leading_zero_bits;

	asm (PPC_CNTLZL "%0,%1" : "=r" (leading_zero_bits) : "r" (mask));
	return leading_zero_bits >> 3;
}

static inline bool has_zero(unsigned long val, unsigned long *data, const struct word_at_a_time *c)
{
	unsigned long rhs = val | c->low_bits;
	*data = rhs;
	return (val + c->high_bits) & ~rhs;
}
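
/*
 * Worked example (illustrative only): for the big-endian 64-bit word
 * val = 0x4142004344454647 ("AB\0CDEFG"):
 *
 *	rhs = val | 0x7f7f7f7f7f7f7f7f	-> 0x7f7f7f7f7f7f7f7f
 *	val + 0xfefefefefefefeff	-> 0x4040ff4243444546
 *	(val + high_bits) & ~rhs	-> 0x0000800000000000
 *
 * so has_zero() is nonzero, prep_zero_mask() produces the same
 * 0x0000800000000000, and find_zero() counts its 16 leading zeros,
 * giving 16 >> 3 = 2, the index of the NUL byte.
 */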

#else

struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

#ifdef CONFIG_64BIT

/* Alan Modra's little-endian strlen tail for 64-bit */
#define create_zero_mask(mask) (mask)

static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long leading_zero_bits;
	long trailing_zero_bit_mask;

	asm ("addi %1,%2,-1\n\t"
	     "andc %1,%1,%2\n\t"
	     "popcntd %0,%1"
	     : "=r" (leading_zero_bits), "=&r" (trailing_zero_bit_mask)
	     : "b" (mask));
	return leading_zero_bits >> 3;
}
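
/*
 * Worked example (illustrative only): a NUL in byte 2 arrives here as
 * mask = 0x0000000000800000 (see has_zero() below), so
 *
 *	mask - 1		-> 0x00000000007fffff
 *	(mask - 1) & ~mask	-> 0x00000000007fffff	(23 one bits)
 *	popcntd			-> 23, and 23 >> 3 = 2
 *
 * i.e. the index of the first NUL, counting from the least
 * significant (lowest-addressed) byte.
 */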

#else /* 32-bit case */

/*
 * This is largely generic for little-endian machines, but the
 * optimal byte mask counting is probably going to be something
 * that is architecture-specific. If you have a reliably fast
 * bit count instruction, that might be better than the multiply
 * and shift, for example.
 */

/* Carl Chatfield / Jan Achrenius G+ version for 32-bit */
static inline long count_masked_bytes(long mask)
{
	/* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
	long a = (0x0ff0001+mask) >> 23;
	/* Fix the 1 for 00 case */
	return a & mask;
}

static inline unsigned long create_zero_mask(unsigned long bits)
{
	bits = (bits - 1) & ~bits;
	return bits >> 7;
}

static inline unsigned long find_zero(unsigned long mask)
{
	return count_masked_bytes(mask);
}
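
/*
 * Worked example (illustrative only): a NUL in byte 1 of a 32-bit
 * word arrives here as bits = 0x00008000 from has_zero(), so
 *
 *	create_zero_mask():	(bits - 1) & ~bits	 -> 0x00007fff
 *				>> 7			 -> 0x000000ff
 *	count_masked_bytes():	(0x0ff0001 + 0xff) >> 23 -> 1
 *				1 & 0xff		 -> 1
 *
 * i.e. the masks 000000/0000ff/00ffff/ffffff decode to byte
 * indices 0/1/2/3.
 */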

#endif

/* Return nonzero if it has a zero */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits, const struct word_at_a_time *c)
{
	unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
	*bits = mask;
	return mask;
}
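
/*
 * Worked example (illustrative only): for the little-endian word
 * a = 0x4746454443004241 ("AB\0CDEFG" in memory order):
 *
 *	(a - 0x0101010101010101) & ~a	-> 0x0001000300ff0100
 *	... & 0x8080808080808080	-> 0x0000000000800000
 *
 * Only a 0x00 byte borrows down to 0xff and keeps bit 7 set after
 * the & ~a step. Bytes above the first NUL can be marked spuriously
 * when a borrow propagates, but find_zero() only consumes the least
 * significant match, so the first NUL always wins.
 */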

static inline unsigned long prep_zero_mask(unsigned long a, unsigned long bits, const struct word_at_a_time *c)
{
	return bits;
}

/* The mask we created is directly usable as a bytemask */
#define zero_bytemask(mask) (mask)

#endif /* __BIG_ENDIAN__ */
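
/*
 * Illustrative only (not part of this header): callers such as
 * lib/strncpy_from_user.c and the dentry name hashing in fs/dcache.c
 * drive these helpers roughly as in this hypothetical word_strlen()
 * sketch, which assumes 'str' is word-aligned (unaligned heads need
 * load_unaligned_zeropad() below or a byte-at-a-time prologue):
 *
 *	const struct word_at_a_time constants = WORD_AT_A_TIME_CONSTANTS;
 *	unsigned long val, data, len = 0;
 *
 *	for (;;) {
 *		val = *(const unsigned long *)(str + len);
 *		if (has_zero(val, &data, &constants)) {
 *			data = prep_zero_mask(val, data, &constants);
 *			data = create_zero_mask(data);
 *			return len + find_zero(data);
 *		}
 *		len += sizeof(unsigned long);
 *	}
 */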

/*
 * We use load_unaligned_zeropad() in a selftest, which builds a userspace
 * program. Some linker scripts seem to discard the .fixup section, so allow
 * the test code to use a different section name.
 */
#ifndef FIXUP_SECTION
#define FIXUP_SECTION ".fixup"
#endif

static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset, tmp;

	asm(
	"1: " PPC_LL "%[ret], 0(%[addr])\n"
	"2:\n"
	".section " FIXUP_SECTION ",\"ax\"\n"
	"3: "
#ifdef __powerpc64__
	"clrrdi %[tmp], %[addr], 3\n\t"
	"clrlsldi %[offset], %[addr], 61, 3\n\t"
	"ld %[ret], 0(%[tmp])\n\t"
#ifdef __BIG_ENDIAN__
	"sld %[ret], %[ret], %[offset]\n\t"
#else
	"srd %[ret], %[ret], %[offset]\n\t"
#endif
#else
	"clrrwi %[tmp], %[addr], 2\n\t"
	"clrlslwi %[offset], %[addr], 30, 3\n\t"
	"lwz %[ret], 0(%[tmp])\n\t"
#ifdef __BIG_ENDIAN__
	"slw %[ret], %[ret], %[offset]\n\t"
#else
	"srw %[ret], %[ret], %[offset]\n\t"
#endif
#endif
	"b 2b\n"
	".previous\n"
	".section __ex_table,\"a\"\n\t"
		PPC_LONG_ALIGN "\n\t"
		PPC_LONG "1b,3b\n"
	".previous"
	: [tmp] "=&b" (tmp), [offset] "=&r" (offset), [ret] "=&r" (ret)
	: [addr] "b" (addr), "m" (*(unsigned long *)addr));

	return ret;
}

#undef FIXUP_SECTION

#endif /* _ASM_WORD_AT_A_TIME_H */