// SPDX-License-Identifier: GPL-2.0
/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf ([email protected])
 * Copyright 2000 Grant Grundler ([email protected])
 */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>
#ifdef CONFIG_SMP
/*
 * Pool of spinlocks backing the lock-based xchg/cmpxchg emulation below
 * on SMP.  _atomic_spin_lock_irqsave(ptr, flags) takes the target address,
 * so presumably each lock guards a hash bucket of addresses -- confirm
 * against the _atomic_spin_lock_irqsave definition in the headers.
 * All entries start unlocked.
 */
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
#ifdef CONFIG_64BIT
/*
 * Atomically exchange *ptr with x and return the previous value of *ptr.
 * Atomicity is provided by the per-address hashed spinlock; interrupts are
 * disabled while the lock is held.
 */
unsigned long notrace __xchg64(unsigned long x, volatile unsigned long *ptr)
{
	unsigned long flags;
	unsigned long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return prev;
}
#endif
- unsigned long notrace __xchg32(int x, volatile int *ptr)
- {
- unsigned long flags;
- long temp;
- _atomic_spin_lock_irqsave(ptr, flags);
- temp = (long) *ptr; /* XXX - sign extension wanted? */
- *ptr = x;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return (unsigned long)temp;
- }
- unsigned long notrace __xchg8(char x, volatile char *ptr)
- {
- unsigned long flags;
- long temp;
- _atomic_spin_lock_irqsave(ptr, flags);
- temp = (long) *ptr; /* XXX - sign extension wanted? */
- *ptr = x;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return (unsigned long)temp;
- }
- u64 notrace __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
- {
- unsigned long flags;
- u64 prev;
- _atomic_spin_lock_irqsave(ptr, flags);
- if ((prev = *ptr) == old)
- *ptr = new;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return prev;
- }
- unsigned long notrace __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
- {
- unsigned long flags;
- unsigned int prev;
- _atomic_spin_lock_irqsave(ptr, flags);
- if ((prev = *ptr) == old)
- *ptr = new;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return (unsigned long)prev;
- }
- u8 notrace __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
- {
- unsigned long flags;
- u8 prev;
- _atomic_spin_lock_irqsave(ptr, flags);
- if ((prev = *ptr) == old)
- *ptr = new;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return prev;
- }