  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #ifndef _LINUX_BITOPS_H
  3. #define _LINUX_BITOPS_H
  4. #include <asm/types.h>
  5. #include <linux/bits.h>
  6. #include <linux/typecheck.h>
  7. #include <uapi/linux/kernel.h>
/* Set bits in the first 'n' bytes when loaded from memory */
#ifdef __LITTLE_ENDIAN
/* Little endian: the first n bytes in memory are the n low-order bytes. */
# define aligned_byte_mask(n) ((1UL << 8*(n))-1)
#else
/* Big endian: the first n bytes in memory are the n high-order bytes. */
# define aligned_byte_mask(n) (~0xffUL << (BITS_PER_LONG - 8 - 8*(n)))
#endif

/* Number of bits in an object of the given type. */
#define BITS_PER_TYPE(type) (sizeof(type) * BITS_PER_BYTE)
/* Storage units (rounded up) needed to hold nr bits. */
#define BITS_TO_LONGS(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
#define BITS_TO_U64(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
#define BITS_TO_U32(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
#define BITS_TO_BYTES(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(char))

/*
 * Software population-count fallbacks, used by architectures that
 * have no dedicated instruction for it.
 */
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);
  23. /*
  24. * Defined here because those may be needed by architecture-specific static
  25. * inlines.
  26. */
  27. #include <asm-generic/bitops/generic-non-atomic.h>
  28. /*
  29. * Many architecture-specific non-atomic bitops contain inline asm code and due
  30. * to that the compiler can't optimize them to compile-time expressions or
  31. * constants. In contrary, generic_*() helpers are defined in pure C and
  32. * compilers optimize them just well.
  33. * Therefore, to make `unsigned long foo = 0; __set_bit(BAR, &foo)` effectively
  34. * equal to `unsigned long foo = BIT(BAR)`, pick the generic C alternative when
  35. * the arguments can be resolved at compile time. That expression itself is a
  36. * constant and doesn't bring any functional changes to the rest of cases.
  37. * The casts to `uintptr_t` are needed to mitigate `-Waddress` warnings when
  38. * passing a bitmap from .bss or .data (-> `!!addr` is always true).
  39. */
/*
 * bitop - pick the compile-time-foldable variant of @op when possible
 *
 * When @nr, the (non-NULL-ness of the) address and the pointed-to word are
 * all compile-time constants, expand to const_<op>() so the whole expression
 * folds to a constant; otherwise expand to the (possibly arch-optimized)
 * <op>() itself.  See the comment block above for the motivation.
 */
#define bitop(op, nr, addr) \
((__builtin_constant_p(nr) && \
__builtin_constant_p((uintptr_t)(addr) != (uintptr_t)NULL) && \
(uintptr_t)(addr) != (uintptr_t)NULL && \
__builtin_constant_p(*(const unsigned long *)(addr))) ? \
const##op(nr, addr) : op(nr, addr))

/* Non-atomic bitops and bit tests, routed through the dispatcher above. */
#define __set_bit(nr, addr) bitop(___set_bit, nr, addr)
#define __clear_bit(nr, addr) bitop(___clear_bit, nr, addr)
#define __change_bit(nr, addr) bitop(___change_bit, nr, addr)
#define __test_and_set_bit(nr, addr) bitop(___test_and_set_bit, nr, addr)
#define __test_and_clear_bit(nr, addr) bitop(___test_and_clear_bit, nr, addr)
#define __test_and_change_bit(nr, addr) bitop(___test_and_change_bit, nr, addr)
#define test_bit(nr, addr) bitop(_test_bit, nr, addr)
#define test_bit_acquire(nr, addr) bitop(_test_bit_acquire, nr, addr)
  54. /*
  55. * Include this here because some architectures need generic_ffs/fls in
  56. * scope
  57. */
  58. #include <asm/bitops.h>
/* Check that the bitops prototypes are sane */
/*
 * Assert that the arch_*, const_* and _* flavors of each bitop all share
 * the prototype of the generic_* implementation, so bitop() can pick any
 * of them interchangeably.
 */
#define __check_bitop_pr(name) \
static_assert(__same_type(arch_##name, generic_##name) && \
__same_type(const_##name, generic_##name) && \
__same_type(_##name, generic_##name))

__check_bitop_pr(__set_bit);
__check_bitop_pr(__clear_bit);
__check_bitop_pr(__change_bit);
__check_bitop_pr(__test_and_set_bit);
__check_bitop_pr(__test_and_clear_bit);
__check_bitop_pr(__test_and_change_bit);
__check_bitop_pr(test_bit);

/* Only needed for the assertions above. */
#undef __check_bitop_pr
  72. static inline int get_bitmask_order(unsigned int count)
  73. {
  74. int order;
  75. order = fls(count);
  76. return order; /* We could be slightly more clever with -1 here... */
  77. }
  78. static __always_inline unsigned long hweight_long(unsigned long w)
  79. {
  80. return sizeof(w) == 4 ? hweight32(w) : hweight64((__u64)w);
  81. }
  82. /**
  83. * rol64 - rotate a 64-bit value left
  84. * @word: value to rotate
  85. * @shift: bits to roll
  86. */
  87. static inline __u64 rol64(__u64 word, unsigned int shift)
  88. {
  89. return (word << (shift & 63)) | (word >> ((-shift) & 63));
  90. }
  91. /**
  92. * ror64 - rotate a 64-bit value right
  93. * @word: value to rotate
  94. * @shift: bits to roll
  95. */
  96. static inline __u64 ror64(__u64 word, unsigned int shift)
  97. {
  98. return (word >> (shift & 63)) | (word << ((-shift) & 63));
  99. }
  100. /**
  101. * rol32 - rotate a 32-bit value left
  102. * @word: value to rotate
  103. * @shift: bits to roll
  104. */
  105. static inline __u32 rol32(__u32 word, unsigned int shift)
  106. {
  107. return (word << (shift & 31)) | (word >> ((-shift) & 31));
  108. }
  109. /**
  110. * ror32 - rotate a 32-bit value right
  111. * @word: value to rotate
  112. * @shift: bits to roll
  113. */
  114. static inline __u32 ror32(__u32 word, unsigned int shift)
  115. {
  116. return (word >> (shift & 31)) | (word << ((-shift) & 31));
  117. }
  118. /**
  119. * rol16 - rotate a 16-bit value left
  120. * @word: value to rotate
  121. * @shift: bits to roll
  122. */
  123. static inline __u16 rol16(__u16 word, unsigned int shift)
  124. {
  125. return (word << (shift & 15)) | (word >> ((-shift) & 15));
  126. }
  127. /**
  128. * ror16 - rotate a 16-bit value right
  129. * @word: value to rotate
  130. * @shift: bits to roll
  131. */
  132. static inline __u16 ror16(__u16 word, unsigned int shift)
  133. {
  134. return (word >> (shift & 15)) | (word << ((-shift) & 15));
  135. }
  136. /**
  137. * rol8 - rotate an 8-bit value left
  138. * @word: value to rotate
  139. * @shift: bits to roll
  140. */
  141. static inline __u8 rol8(__u8 word, unsigned int shift)
  142. {
  143. return (word << (shift & 7)) | (word >> ((-shift) & 7));
  144. }
  145. /**
  146. * ror8 - rotate an 8-bit value right
  147. * @word: value to rotate
  148. * @shift: bits to roll
  149. */
  150. static inline __u8 ror8(__u8 word, unsigned int shift)
  151. {
  152. return (word >> (shift & 7)) | (word << ((-shift) & 7));
  153. }
/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 *
 * This is safe to use for 16- and 8-bit types as well.
 */
static __always_inline __s32 sign_extend32(__u32 value, int index)
{
	/*
	 * Move the sign bit up to bit 31, then arithmetic-shift back down
	 * to replicate it into all higher bits.  NOTE(review): relies on
	 * >> of a negative signed value being an arithmetic shift
	 * (implementation-defined in C, but assumed throughout).
	 */
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}
/**
 * sign_extend64 - sign extend a 64-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<64) to sign bit
 */
static __always_inline __s64 sign_extend64(__u64 value, int index)
{
	/* Same shift-up/shift-down trick as sign_extend32(). */
	__u8 shift = 63 - index;
	return (__s64)(value << shift) >> shift;
}
  176. static inline unsigned fls_long(unsigned long l)
  177. {
  178. if (sizeof(l) == 4)
  179. return fls(l);
  180. return fls64(l);
  181. }
  182. static inline int get_count_order(unsigned int count)
  183. {
  184. if (count == 0)
  185. return -1;
  186. return fls(--count);
  187. }
  188. /**
  189. * get_count_order_long - get order after rounding @l up to power of 2
  190. * @l: parameter
  191. *
  192. * it is same as get_count_order() but with long type parameter
  193. */
  194. static inline int get_count_order_long(unsigned long l)
  195. {
  196. if (l == 0UL)
  197. return -1;
  198. return (int)fls_long(--l);
  199. }
/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	/* Low half empty: the first set bit lives in the upper 32 bits. */
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	/* 64-bit (or non-empty low half on 32-bit): search directly. */
	return __ffs((unsigned long)word);
}
  218. /**
  219. * fns - find N'th set bit in a word
  220. * @word: The word to search
  221. * @n: Bit to find
  222. */
  223. static inline unsigned long fns(unsigned long word, unsigned int n)
  224. {
  225. unsigned int bit;
  226. while (word) {
  227. bit = __ffs(word);
  228. if (n-- == 0)
  229. return bit;
  230. __clear_bit(bit, &word);
  231. }
  232. return BITS_PER_LONG;
  233. }
  234. /**
  235. * assign_bit - Assign value to a bit in memory
  236. * @nr: the bit to set
  237. * @addr: the address to start counting from
  238. * @value: the value to assign
  239. */
  240. static __always_inline void assign_bit(long nr, volatile unsigned long *addr,
  241. bool value)
  242. {
  243. if (value)
  244. set_bit(nr, addr);
  245. else
  246. clear_bit(nr, addr);
  247. }
  248. static __always_inline void __assign_bit(long nr, volatile unsigned long *addr,
  249. bool value)
  250. {
  251. if (value)
  252. __set_bit(nr, addr);
  253. else
  254. __clear_bit(nr, addr);
  255. }
/**
 * __ptr_set_bit - Set bit in a pointer's value
 * @nr: the bit to set
 * @addr: the address of the pointer variable
 *
 * Example:
 *	void *p = foo();
 *	__ptr_set_bit(bit, &p);
 */
#define __ptr_set_bit(nr, addr) \
({ \
	/* compile-time check that *addr really is a pointer */ \
	typecheck_pointer(*(addr)); \
	__set_bit(nr, (unsigned long *)(addr)); \
})
/**
 * __ptr_clear_bit - Clear bit in a pointer's value
 * @nr: the bit to clear
 * @addr: the address of the pointer variable
 *
 * Example:
 *	void *p = foo();
 *	__ptr_clear_bit(bit, &p);
 */
#define __ptr_clear_bit(nr, addr) \
({ \
	/* compile-time check that *addr really is a pointer */ \
	typecheck_pointer(*(addr)); \
	__clear_bit(nr, (unsigned long *)(addr)); \
})
/**
 * __ptr_test_bit - Test bit in a pointer's value
 * @nr: the bit to test
 * @addr: the address of the pointer variable
 *
 * Example:
 *	void *p = foo();
 *	if (__ptr_test_bit(bit, &p)) {
 *		...
 *	} else {
 *		...
 *	}
 */
#define __ptr_test_bit(nr, addr) \
({ \
	/* compile-time check that *addr really is a pointer */ \
	typecheck_pointer(*(addr)); \
	test_bit(nr, (unsigned long *)(addr)); \
})
#ifdef __KERNEL__

#ifndef set_mask_bits
/**
 * set_mask_bits - atomically clear @mask and set @bits in *@ptr
 * @ptr: pointer to the word to update
 * @mask: bits to clear
 * @bits: bits to set
 *
 * Implemented as a try_cmpxchg() retry loop; returns the old value of
 * *@ptr.  Architectures may provide their own version.
 */
#define set_mask_bits(ptr, mask, bits) \
({ \
	const typeof(*(ptr)) mask__ = (mask), bits__ = (bits); \
	typeof(*(ptr)) old__, new__; \
\
	old__ = READ_ONCE(*(ptr)); \
	do { \
		new__ = (old__ & ~mask__) | bits__; \
	} while (!try_cmpxchg(ptr, &old__, new__)); \
\
	old__; \
})
#endif

#ifndef bit_clear_unless
/**
 * bit_clear_unless - atomically clear @clear in *@ptr unless @test bits set
 * @ptr: pointer to the word to update
 * @clear: bits to clear
 * @test: bits that veto the clear when any of them is set
 *
 * Evaluates to true iff none of the @test bits were set (i.e. the clear
 * was performed).  try_cmpxchg() retry loop, like set_mask_bits().
 */
#define bit_clear_unless(ptr, clear, test) \
({ \
	const typeof(*(ptr)) clear__ = (clear), test__ = (test);\
	typeof(*(ptr)) old__, new__; \
\
	old__ = READ_ONCE(*(ptr)); \
	do { \
		if (old__ & test__) \
			break; \
		new__ = old__ & ~clear__; \
	} while (!try_cmpxchg(ptr, &old__, new__)); \
\
	!(old__ & test__); \
})
#endif

#endif /* __KERNEL__ */
#endif /* _LINUX_BITOPS_H */