#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>
#include <linux/bits.h>

#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))

extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but use bit as value to start with */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* same as for_each_clear_bit() but use bit as value to start with */
#define for_each_clear_bit_from(bit, addr, size) \
	for ((bit) = find_next_zero_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
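
/*
 * Illustrative usage of the iteration helpers above (the bitmap and
 * loop variable are hypothetical, not part of the original header):
 *
 *	DECLARE_BITMAP(map, 64);
 *	unsigned int bit;
 *
 *	for_each_set_bit(bit, map, 64)
 *		pr_info("bit %u is set\n", bit);
 *
 * The _from variants resume the scan at the current value of @bit
 * instead of starting from bit 0.
 */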

static inline int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __always_inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
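
/*
 * Example (illustrative, not from the original header):
 * hweight_long(0xf0UL) == 4, i.e. the number of set bits (the
 * "Hamming weight") of the word, regardless of whether long is
 * 32 or 64 bits wide.
 */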

/**
 * rol64 - rotate a 64-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 rol64(__u64 word, unsigned int shift)
{
	return (word << (shift & 63)) | (word >> ((-shift) & 63));
}

/**
 * ror64 - rotate a 64-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 ror64(__u64 word, unsigned int shift)
{
	return (word >> (shift & 63)) | (word << ((-shift) & 63));
}

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << (shift & 31)) | (word >> ((-shift) & 31));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> (shift & 31)) | (word << ((-shift) & 31));
}

/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << (shift & 15)) | (word >> ((-shift) & 15));
}

/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> (shift & 15)) | (word << ((-shift) & 15));
}

/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << (shift & 7)) | (word >> ((-shift) & 7));
}

/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> (shift & 7)) | (word << ((-shift) & 7));
}
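
/*
 * Example (illustrative, not from the original header):
 * ror32(0x00000001, 1) == 0x80000000 and rol32(0x80000000, 1) == 1.
 * The (-shift) & 31 masking keeps a shift count of 0 well defined
 * (the word is returned unchanged) instead of shifting by the full
 * word width, which is undefined for the 32- and 64-bit variants.
 */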

/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 *
 * This is safe to use for 16- and 8-bit types as well.
 */
static inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}

/**
 * sign_extend64 - sign extend a 64-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<64) to sign bit
 */
static inline __s64 sign_extend64(__u64 value, int index)
{
	__u8 shift = 63 - index;
	return (__s64)(value << shift) >> shift;
}
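
/*
 * Example (illustrative, not from the original header):
 * sign_extend32(0x80, 7) == -128, i.e. bit 7 is treated as the sign
 * bit of an 8-bit quantity held in a wider word.
 */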

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

static inline int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
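
/*
 * Example (illustrative, not from the original header):
 * get_count_order(16) == 4 and get_count_order(17) == 5, i.e. the
 * order obtained after rounding the count up to the next power of two.
 */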

/**
 * get_count_order_long - get order after rounding @l up to power of 2
 * @l: parameter
 *
 * it is the same as get_count_order() but with a long type parameter
 */
static inline int get_count_order_long(unsigned long l)
{
	if (l == 0UL)
		return -1;
	else if (l & (l - 1UL))
		return (int)fls_long(l);
	else
		return (int)fls_long(l) - 1;
}

/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs().
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}
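
/*
 * Example (illustrative, not from the original header):
 * __ffs64(0x100000000ULL) == 32.  On 32-bit kernels the low word is
 * zero here, so the search continues in the upper 32 bits.
 */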

#ifdef __KERNEL__

#ifndef set_mask_bits
#define set_mask_bits(ptr, _mask, _bits)	\
({								\
	const typeof(*ptr) mask = (_mask), bits = (_bits);	\
	typeof(*ptr) old, new;					\
								\
	do {							\
		old = ACCESS_ONCE(*ptr);			\
		new = (old & ~mask) | bits;			\
	} while (cmpxchg(ptr, old, new) != old);		\
								\
	new;							\
})
#endif
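
/*
 * Illustrative usage (the @flags variable is hypothetical, not part of
 * the original header): atomically replace the two-bit field at bits
 * 5:4 with the value 2:
 *
 *	set_mask_bits(&flags, GENMASK(5, 4), 2 << 4);
 *
 * The cmpxchg() loop retries until no concurrent writer has changed
 * *ptr between the read and the update.
 */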

#ifndef bit_clear_unless
#define bit_clear_unless(ptr, _clear, _test)	\
({								\
	const typeof(*ptr) clear = (_clear), test = (_test);	\
	typeof(*ptr) old, new;					\
								\
	do {							\
		old = ACCESS_ONCE(*ptr);			\
		new = old & ~clear;				\
	} while (!(old & test) &&				\
		 cmpxchg(ptr, old, new) != old);		\
								\
	!(old & test);						\
})
#endif
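
/*
 * Illustrative usage (the @flags variable is hypothetical, not part of
 * the original header): clear BIT(0) unless BIT(1) is already set; the
 * expression evaluates true when none of the test bits were set and
 * the clear therefore took effect:
 *
 *	if (bit_clear_unless(&flags, BIT(0), BIT(1)))
 *		...;
 */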

#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The number of bits to search
 *
 * Returns the bit number of the last set bit, or @size if no bits are set.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif

#endif /* __KERNEL__ */
#endif