// SPDX-License-Identifier: GPL-2.0
/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf ([email protected])
 * Copyright 2000 Grant Grundler ([email protected])
 */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

#ifdef CONFIG_SMP
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
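
/*
 * The helpers below serialize through one of the hashed spinlocks above
 * rather than a hardware read-modify-write instruction. The hashing itself
 * lives in the architecture headers; a minimal sketch of how
 * _atomic_spin_lock_irqsave() presumably maps a target address to a lock
 * (the ATOMIC_HASH name and the L1_CACHE_BYTES divisor are assumptions
 * drawn from typical arch headers, not from this file):
 *
 *	#define ATOMIC_HASH(a) \
 *		(&__atomic_hash[(((unsigned long)(a)) / L1_CACHE_BYTES) & \
 *				(ATOMIC_HASH_SIZE - 1)])
 *
 *	#define _atomic_spin_lock_irqsave(l, f) do {	\
 *		arch_spinlock_t *s = ATOMIC_HASH(l);	\
 *		local_irq_save(f);			\
 *		arch_spin_lock(s);			\
 *	} while (0)
 *
 * Dividing by the cache-line size before masking spreads neighboring
 * objects across different locks while keeping accesses to the same
 * word on the same lock.
 */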

/* Exchange helpers: atomically swap *ptr with x and return the old value. */

#ifdef CONFIG_64BIT
unsigned long notrace __xchg64(unsigned long x, volatile unsigned long *ptr)
{
	unsigned long temp, flags;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return temp;
}
#endif

unsigned long notrace __xchg32(int x, volatile int *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}

unsigned long notrace __xchg8(char x, volatile char *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}
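
/*
 * Callers do not reach these helpers directly; the generic xchg() macro in
 * the arch headers presumably dispatches on sizeof(*ptr), along these lines
 * (a sketch under that assumption, not the header's literal text):
 *
 *	static inline unsigned long
 *	__arch_xchg(unsigned long x, volatile void *ptr, int size)
 *	{
 *		switch (size) {
 *		case 1: return __xchg8((char)x, (volatile char *)ptr);
 *		case 4: return __xchg32((int)x, (volatile int *)ptr);
 *	#ifdef CONFIG_64BIT
 *		case 8: return __xchg64(x, (volatile unsigned long *)ptr);
 *	#endif
 *		}
 *		return x;
 *	}
 */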
/*
 * Compare-and-exchange helpers: store new in *ptr only if *ptr still
 * equals old, and return the value that was actually found there.
 */

u64 notrace __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
{
	unsigned long flags;
	u64 prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return prev;
}

unsigned long notrace __cmpxchg_u32(volatile unsigned int *ptr,
				    unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)prev;
}

u8 notrace __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
{
	unsigned long flags;
	u8 prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return prev;
}
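
/*
 * Typical use of a compare-and-exchange helper is a retry loop: reread the
 * current value until the cmpxchg succeeds. A minimal sketch of an atomic
 * increment built on __cmpxchg_u32 (illustrative only; real kernel code
 * would go through the generic cmpxchg()/atomic_* interfaces, and the
 * function name here is hypothetical):
 *
 *	static inline void hypothetical_atomic_inc_u32(volatile unsigned int *p)
 *	{
 *		unsigned int old;
 *
 *		do {
 *			old = *p;
 *		} while (__cmpxchg_u32(p, old, old + 1) != old);
 *	}
 *
 * The loop only repeats when another CPU updated *p between the read and
 * the locked compare-and-store, so it is lock-free from the caller's view
 * even though each attempt briefly takes a hashed spinlock.
 */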