#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, and therefore
 * differs in spirit from ffz (man ffs).
 */

static inline int generic_ffs(int x)
{
	int r = 1;

	if (!x)
		return 0;
	if (!(x & 0xffff)) {
		x >>= 16;
		r += 16;
	}
	if (!(x & 0xff)) {
		x >>= 8;
		r += 8;
	}
	if (!(x & 0xf)) {
		x >>= 4;
		r += 4;
	}
	if (!(x & 3)) {
		x >>= 2;
		r += 2;
	}
	if (!(x & 1)) {
		x >>= 1;
		r += 1;
	}
	return r;
}
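
/*
 * Worked example (editor's illustration): generic_ffs() numbers bits
 * from 1 at the least-significant end, halving the search range at
 * each step:
 *
 *	generic_ffs(0x00000001) == 1
 *	generic_ffs(0x00008000) == 16
 *	generic_ffs(0)          == 0	(no bit set)
 */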

/*
 * fls: find last bit set.
 */

static inline int generic_fls(int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
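
/*
 * Worked example (editor's illustration): generic_fls() is the mirror
 * image of generic_ffs(), shifting the word left until the top bit is
 * set and counting down from 32:
 *
 *	generic_fls(0x00000010) == 5
 *	generic_fls(0x80000000) == 32
 *	generic_fls(0)          == 0
 */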

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

static inline int generic_fls64(__u64 x)
{
	__u32 h = x >> 32;
	if (h)
		return fls(h) + 32;
	return fls(x);
}
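
/*
 * Worked example (editor's illustration): fls() takes an int, so the
 * high word must be ranked separately via fls(h) above:
 *
 *	generic_fls64(1ULL)       == 1
 *	generic_fls64(1ULL << 32) == 33
 *	generic_fls64(0)          == 0
 */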

static inline int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}
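
/*
 * Worked example (editor's illustration): get_bitmask_order() is just
 * fls(), i.e. the number of bits needed to hold count itself:
 *
 *	get_bitmask_order(5) == 3	(101b)
 *	get_bitmask_order(8) == 4	(hence the "-1" remark above:
 *					 values 0..7 fit in 3 bits)
 */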

static inline int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
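
/*
 * Worked example (editor's illustration): count & (count - 1) is
 * nonzero exactly when count is not a power of two, so
 * get_count_order() rounds up, returning the smallest order with
 * (1 << order) >= count:
 *
 *	get_count_order(4) == 2
 *	get_count_order(5) == 3
 */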

/*
 * hweightN: returns the Hamming weight (i.e. the number
 * of bits set) of an N-bit word
 */

static inline unsigned int generic_hweight32(unsigned int w)
{
	unsigned int res = (w & 0x55555555) + ((w >> 1) & 0x55555555);
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
	res = (res & 0x0F0F0F0F) + ((res >> 4) & 0x0F0F0F0F);
	res = (res & 0x00FF00FF) + ((res >> 8) & 0x00FF00FF);
	return (res & 0x0000FFFF) + ((res >> 16) & 0x0000FFFF);
}
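
/*
 * How this works (editor's note): each line treats the word as a vector
 * of fields twice the previous width and adds neighbouring fields in
 * parallel. After the first line every 2-bit field holds the population
 * count of its two bits, after the second every 4-bit field holds the
 * count of its four bits, and so on up to the full 32-bit count. E.g.
 *
 *	generic_hweight32(0xF0F0F0F0) == 16
 */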

static inline unsigned int generic_hweight16(unsigned int w)
{
	unsigned int res = (w & 0x5555) + ((w >> 1) & 0x5555);
	res = (res & 0x3333) + ((res >> 2) & 0x3333);
	res = (res & 0x0F0F) + ((res >> 4) & 0x0F0F);
	return (res & 0x00FF) + ((res >> 8) & 0x00FF);
}

static inline unsigned int generic_hweight8(unsigned int w)
{
	unsigned int res = (w & 0x55) + ((w >> 1) & 0x55);
	res = (res & 0x33) + ((res >> 2) & 0x33);
	return (res & 0x0F) + ((res >> 4) & 0x0F);
}

static inline unsigned long generic_hweight64(__u64 w)
{
#if BITS_PER_LONG < 64
	return generic_hweight32((unsigned int)(w >> 32)) +
	       generic_hweight32((unsigned int)w);
#else
	u64 res;
	res = (w & 0x5555555555555555ul) + ((w >> 1) & 0x5555555555555555ul);
	res = (res & 0x3333333333333333ul) + ((res >> 2) & 0x3333333333333333ul);
	res = (res & 0x0F0F0F0F0F0F0F0Ful) + ((res >> 4) & 0x0F0F0F0F0F0F0F0Ful);
	res = (res & 0x00FF00FF00FF00FFul) + ((res >> 8) & 0x00FF00FF00FF00FFul);
	res = (res & 0x0000FFFF0000FFFFul) + ((res >> 16) & 0x0000FFFF0000FFFFul);
	return (res & 0x00000000FFFFFFFFul) + ((res >> 32) & 0x00000000FFFFFFFFul);
#endif
}
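
/*
 * Editor's note: on 32-bit kernels generic_hweight64() simply sums the
 * 32-bit counts of the two halves; on 64-bit kernels it runs the same
 * divide-and-conquer ladder as above, one level deeper.
 */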

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? generic_hweight32(w) : generic_hweight64(w);
}
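
/*
 * Editor's note: sizeof(w) is a compile-time constant, so the ternary
 * folds away and hweight_long() compiles to a direct call to the
 * variant matching the native word size.
 */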

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}
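
/*
 * Worked example (editor's illustration):
 *
 *	rol32(0x80000001, 1) == 0x00000003
 *
 * Note that shift must be in 1..31: a shift of 0 makes the
 * complementary shift count 32, which is undefined behaviour for a
 * 32-bit type.
 */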

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}
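
/*
 * Worked example (editor's illustration):
 *
 *	ror32(0x80000001, 1) == 0xC0000000
 *
 * The same 1..31 restriction on shift applies as for rol32() above.
 */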

#endif /* _LINUX_BITOPS_H */