MIPS: Allow kernel use of LL/SC to be separate from the presence of LL/SC.
On some CPUs it is more efficient to disable and enable interrupts in the kernel than to use ll/sc for atomic operations. But if we were simply to set cpu_has_llsc to false, we would break the userspace futex interface (in asm/futex.h).

We separate the two concepts with a new predicate, kernel_uses_llsc, which lets us disable the kernel's use of ll/sc while still allowing the futex code to use it.

Also fixed are a couple of cases in bitops.h where ll/sc was being used unconditionally even when cpu_has_llsc was false.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Committed by: Ralf Baechle
Parent: f7ade3c168
Commit: b791d1193a
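For context on the new predicate (its definition is not part of the diff shown below): the natural way to wire it up is for kernel_uses_llsc to default to cpu_has_llsc in asm/cpu-features.h, so behaviour is unchanged unless a platform explicitly overrides it. The sketch below only illustrates that idea; the Kconfig option name is hypothetical.

    /*
     * Sketch only -- not taken from this commit's diff.  A kernel-side
     * LL/SC predicate that defaults to the existing CPU feature test but
     * can be forced off where masking interrupts is cheaper than an
     * ll/sc retry loop.  Userspace-visible code such as asm/futex.h keeps
     * testing cpu_has_llsc directly, so the futex interface still works.
     */
    #ifdef CONFIG_PLATFORM_PREFERS_IRQ_ATOMICS   /* hypothetical option */
    # define kernel_uses_llsc 0
    #endif

    #ifndef kernel_uses_llsc
    # define kernel_uses_llsc cpu_has_llsc       /* default: no change */
    #endif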
asm/bitops.h:

@@ -61,7 +61,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
         unsigned short bit = nr & SZLONG_MASK;
         unsigned long temp;

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 __asm__ __volatile__(
                 " .set mips3 \n"
                 "1: " __LL "%0, %1 # set_bit \n"
@@ -72,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                 : "=&r" (temp), "=m" (*m)
                 : "ir" (1UL << bit), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
-        } else if (__builtin_constant_p(bit)) {
+        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                 __asm__ __volatile__(
                 "1: " __LL "%0, %1 # set_bit \n"
                 " " __INS "%0, %4, %2, 1 \n"
@@ -84,7 +84,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                 : "=&r" (temp), "=m" (*m)
                 : "ir" (bit), "m" (*m), "r" (~0));
 #endif /* CONFIG_CPU_MIPSR2 */
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 __asm__ __volatile__(
                 " .set mips3 \n"
                 "1: " __LL "%0, %1 # set_bit \n"
@@ -126,7 +126,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
         unsigned short bit = nr & SZLONG_MASK;
         unsigned long temp;

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 __asm__ __volatile__(
                 " .set mips3 \n"
                 "1: " __LL "%0, %1 # clear_bit \n"
@@ -137,7 +137,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                 : "=&r" (temp), "=m" (*m)
                 : "ir" (~(1UL << bit)), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
-        } else if (__builtin_constant_p(bit)) {
+        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                 __asm__ __volatile__(
                 "1: " __LL "%0, %1 # clear_bit \n"
                 " " __INS "%0, $0, %2, 1 \n"
@@ -149,7 +149,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                 : "=&r" (temp), "=m" (*m)
                 : "ir" (bit), "m" (*m));
 #endif /* CONFIG_CPU_MIPSR2 */
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 __asm__ __volatile__(
                 " .set mips3 \n"
                 "1: " __LL "%0, %1 # clear_bit \n"
@@ -202,7 +202,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
         unsigned short bit = nr & SZLONG_MASK;

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -215,7 +215,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                 " .set mips0 \n"
                 : "=&r" (temp), "=m" (*m)
                 : "ir" (1UL << bit), "m" (*m));
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -260,7 +260,7 @@ static inline int test_and_set_bit(unsigned long nr,

         smp_llsc_mb();

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -275,7 +275,7 @@ static inline int test_and_set_bit(unsigned long nr,
                 : "=&r" (temp), "=m" (*m), "=&r" (res)
                 : "r" (1UL << bit), "m" (*m)
                 : "memory");
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -328,7 +328,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
         unsigned short bit = nr & SZLONG_MASK;
         unsigned long res;

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -343,7 +343,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
                 : "=&r" (temp), "=m" (*m), "=&r" (res)
                 : "r" (1UL << bit), "m" (*m)
                 : "memory");
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -397,7 +397,7 @@ static inline int test_and_clear_bit(unsigned long nr,

         smp_llsc_mb();

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -414,7 +414,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                 : "r" (1UL << bit), "m" (*m)
                 : "memory");
 #ifdef CONFIG_CPU_MIPSR2
-        } else if (__builtin_constant_p(nr)) {
+        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -431,7 +431,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                 : "ir" (bit), "m" (*m)
                 : "memory");
 #endif
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -487,7 +487,7 @@ static inline int test_and_change_bit(unsigned long nr,

         smp_llsc_mb();

-        if (cpu_has_llsc && R10000_LLSC_WAR) {
+        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

@@ -502,7 +502,7 @@ static inline int test_and_change_bit(unsigned long nr,
                 : "=&r" (temp), "=m" (*m), "=&r" (res)
                 : "r" (1UL << bit), "m" (*m)
                 : "memory");
-        } else if (cpu_has_llsc) {
+        } else if (kernel_uses_llsc) {
                 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                 unsigned long temp;

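When kernel_uses_llsc is false, these functions fall through to their existing non-ll/sc path, which is what the first sentence of the commit message refers to: the read-modify-write is made atomic by disabling and re-enabling interrupts instead of looping on ll/sc. A rough, illustrative sketch of that style of fallback follows; the function name is made up, and the real fallback code lives inside the functions shown above.

    /*
     * Illustrative only: a set_bit-style operation without ll/sc.
     * Masking interrupts keeps anything else on this CPU from
     * interleaving with the load-modify-store of the target word.
     */
    static inline void irqsave_set_bit(unsigned long nr, volatile unsigned long *addr)
    {
            volatile unsigned long *a = addr + (nr >> SZLONG_LOG);  /* word holding the bit */
            unsigned long mask = 1UL << (nr & SZLONG_MASK);
            unsigned long flags;

            raw_local_irq_save(flags);
            *a |= mask;
            raw_local_irq_restore(flags);
    }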