Merge branch 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tj/percpu
* 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tj/percpu: (34 commits)
  m68k: rename global variable vmalloc_end to m68k_vmalloc_end
  percpu: add missing per_cpu_ptr_to_phys() definition for UP
  percpu: Fix kdump failure if booted with percpu_alloc=page
  percpu: make misc percpu symbols unique
  percpu: make percpu symbols in ia64 unique
  percpu: make percpu symbols in powerpc unique
  percpu: make percpu symbols in x86 unique
  percpu: make percpu symbols in xen unique
  percpu: make percpu symbols in cpufreq unique
  percpu: make percpu symbols in oprofile unique
  percpu: make percpu symbols in tracer unique
  percpu: make percpu symbols under kernel/ and mm/ unique
  percpu: remove some sparse warnings
  percpu: make alloc_percpu() handle array types
  vmalloc: fix use of non-existent percpu variable in put_cpu_var()
  this_cpu: Use this_cpu_xx in trace_functions_graph.c
  this_cpu: Use this_cpu_xx for ftrace
  this_cpu: Use this_cpu_xx in nmi handling
  this_cpu: Use this_cpu operations in RCU
  this_cpu: Use this_cpu ops for VM statistics
  ...

Fix up trivial (famous last words) global per-cpu naming conflicts in
	arch/x86/kvm/svm.c
	mm/slab.c
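The diff below renames the temporaries inside the x86 percpu accessor macros (T__, tmp__ and ret__ become pto_T__, pto_tmp__ and pfo_ret__) so that identifiers used by callers can never be captured by the macro expansion. A self-contained illustration of the hazard such unlikely prefixed names guard against; SWAP_BAD and its variables are invented for this sketch and are not kernel code (builds with GCC or Clang, which provide typeof):

#include <stdio.h>

/* Hypothetical helper with a generically named local, analogous to the
 * old T__/tmp__/ret__ temporaries. */
#define SWAP_BAD(a, b)                  \
	do {                            \
		typeof(a) tmp = (a);    \
		(a) = (b);              \
		(b) = tmp;              \
	} while (0)

int main(void)
{
	int tmp = 1;	/* the caller innocently reuses the name "tmp" */
	int x = 2;

	/* After substitution, every use of "b" is the token "tmp", which now
	 * binds to the macro's own local and shadows the caller's variable,
	 * so the swap silently becomes a no-op. */
	SWAP_BAD(x, tmp);
	printf("x=%d tmp=%d\n", x, tmp);	/* prints x=2 tmp=1, not x=1 tmp=2 */
	return 0;
}

With names like pto_tmp__ such a collision is practically impossible, which is what the renames in the hunks below buy.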
@@ -74,31 +74,31 @@ extern void __bad_percpu_size(void);
 
 #define percpu_to_op(op, var, val) \
 do { \
-	typedef typeof(var) T__; \
+	typedef typeof(var) pto_T__; \
 	if (0) { \
-		T__ tmp__; \
-		tmp__ = (val); \
+		pto_T__ pto_tmp__; \
+		pto_tmp__ = (val); \
 	} \
 	switch (sizeof(var)) { \
 	case 1: \
 		asm(op "b %1,"__percpu_arg(0) \
 		    : "+m" (var) \
-		    : "qi" ((T__)(val))); \
+		    : "qi" ((pto_T__)(val))); \
 		break; \
 	case 2: \
 		asm(op "w %1,"__percpu_arg(0) \
 		    : "+m" (var) \
-		    : "ri" ((T__)(val))); \
+		    : "ri" ((pto_T__)(val))); \
 		break; \
 	case 4: \
 		asm(op "l %1,"__percpu_arg(0) \
 		    : "+m" (var) \
-		    : "ri" ((T__)(val))); \
+		    : "ri" ((pto_T__)(val))); \
 		break; \
 	case 8: \
 		asm(op "q %1,"__percpu_arg(0) \
 		    : "+m" (var) \
-		    : "re" ((T__)(val))); \
+		    : "re" ((pto_T__)(val))); \
 		break; \
 	default: __bad_percpu_size(); \
 	} \
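The if (0) block that survives the rename above is a compile-time type check: the assignment is never executed and generates no code, yet val must still be assignment-compatible with the per-cpu variable. A minimal standalone sketch of the same idiom; CHECK_ASSIGNABLE and the chk_* names are invented here, the kernel macro open-codes the pattern with pto_T__/pto_tmp__:

/* Requires typeof, a GCC/Clang extension. */
#define CHECK_ASSIGNABLE(var, val)              \
	do {                                    \
		typedef typeof(var) chk_T__;    \
		if (0) {                        \
			chk_T__ chk_tmp__;      \
			chk_tmp__ = (val);      \
			(void)chk_tmp__;        \
		}                               \
	} while (0)

int main(void)
{
	unsigned long counter = 0;

	CHECK_ASSIGNABLE(counter, 1UL);	/* type-compatible: compiles, emits nothing */
	/* CHECK_ASSIGNABLE(counter, &counter); -- a pointer-to-integer
	 * assignment would be diagnosed at compile time. */
	(void)counter;
	return 0;
}

Because the whole block is dead code, the check costs nothing at run time; a mismatched argument is reported by the compiler (a warning or a hard error, depending on the mismatch).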
@@ -106,31 +106,31 @@ do { \
 
 #define percpu_from_op(op, var, constraint) \
 ({ \
-	typeof(var) ret__; \
+	typeof(var) pfo_ret__; \
	switch (sizeof(var)) { \
 	case 1: \
 		asm(op "b "__percpu_arg(1)",%0" \
-		    : "=q" (ret__) \
+		    : "=q" (pfo_ret__) \
 		    : constraint); \
 		break; \
 	case 2: \
 		asm(op "w "__percpu_arg(1)",%0" \
-		    : "=r" (ret__) \
+		    : "=r" (pfo_ret__) \
 		    : constraint); \
 		break; \
 	case 4: \
 		asm(op "l "__percpu_arg(1)",%0" \
-		    : "=r" (ret__) \
+		    : "=r" (pfo_ret__) \
 		    : constraint); \
 		break; \
 	case 8: \
 		asm(op "q "__percpu_arg(1)",%0" \
-		    : "=r" (ret__) \
+		    : "=r" (pfo_ret__) \
 		    : constraint); \
 		break; \
 	default: __bad_percpu_size(); \
 	} \
-	ret__; \
+	pfo_ret__; \
 })
 
 /*
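percpu_from_op() above returns a value from a multi-statement macro by using a GCC/Clang statement expression: the last expression inside ({ ... }) becomes the value of the whole construct, which is how pfo_ret__ reaches the caller. A reduced sketch of that pattern without the inline assembly; READ_SKETCH is an invented name, and the real macro additionally dispatches on sizeof(var) to pick the asm width, calling the undefined __bad_percpu_size() so that unsupported sizes fail at link time:

#include <stdio.h>

#define READ_SKETCH(var)                                        \
	({                                                      \
		typeof(var) pfo_ret__;                          \
		pfo_ret__ = (var); /* stand-in for the asm */   \
		pfo_ret__;         /* value of the ({ ... }) */ \
	})

int main(void)
{
	unsigned int v = 7;

	printf("%u\n", READ_SKETCH(v));	/* prints 7 */
	return 0;
}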
@@ -153,6 +153,84 @@ do { \
 #define percpu_or(var, val)	percpu_to_op("or", per_cpu__##var, val)
 #define percpu_xor(var, val)	percpu_to_op("xor", per_cpu__##var, val)
 
+#define __this_cpu_read_1(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_read_2(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_read_4(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+
+#define __this_cpu_write_1(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define __this_cpu_write_2(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define __this_cpu_write_4(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define __this_cpu_add_1(pcp, val)	percpu_to_op("add", (pcp), val)
+#define __this_cpu_add_2(pcp, val)	percpu_to_op("add", (pcp), val)
+#define __this_cpu_add_4(pcp, val)	percpu_to_op("add", (pcp), val)
+#define __this_cpu_and_1(pcp, val)	percpu_to_op("and", (pcp), val)
+#define __this_cpu_and_2(pcp, val)	percpu_to_op("and", (pcp), val)
+#define __this_cpu_and_4(pcp, val)	percpu_to_op("and", (pcp), val)
+#define __this_cpu_or_1(pcp, val)	percpu_to_op("or", (pcp), val)
+#define __this_cpu_or_2(pcp, val)	percpu_to_op("or", (pcp), val)
+#define __this_cpu_or_4(pcp, val)	percpu_to_op("or", (pcp), val)
+#define __this_cpu_xor_1(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define __this_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define __this_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
+
+#define this_cpu_read_1(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_read_2(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_read_4(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_write_1(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define this_cpu_write_2(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define this_cpu_write_4(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define this_cpu_add_1(pcp, val)	percpu_to_op("add", (pcp), val)
+#define this_cpu_add_2(pcp, val)	percpu_to_op("add", (pcp), val)
+#define this_cpu_add_4(pcp, val)	percpu_to_op("add", (pcp), val)
+#define this_cpu_and_1(pcp, val)	percpu_to_op("and", (pcp), val)
+#define this_cpu_and_2(pcp, val)	percpu_to_op("and", (pcp), val)
+#define this_cpu_and_4(pcp, val)	percpu_to_op("and", (pcp), val)
+#define this_cpu_or_1(pcp, val)	percpu_to_op("or", (pcp), val)
+#define this_cpu_or_2(pcp, val)	percpu_to_op("or", (pcp), val)
+#define this_cpu_or_4(pcp, val)	percpu_to_op("or", (pcp), val)
+#define this_cpu_xor_1(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define this_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define this_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
+
+#define irqsafe_cpu_add_1(pcp, val)	percpu_to_op("add", (pcp), val)
+#define irqsafe_cpu_add_2(pcp, val)	percpu_to_op("add", (pcp), val)
+#define irqsafe_cpu_add_4(pcp, val)	percpu_to_op("add", (pcp), val)
+#define irqsafe_cpu_and_1(pcp, val)	percpu_to_op("and", (pcp), val)
+#define irqsafe_cpu_and_2(pcp, val)	percpu_to_op("and", (pcp), val)
+#define irqsafe_cpu_and_4(pcp, val)	percpu_to_op("and", (pcp), val)
+#define irqsafe_cpu_or_1(pcp, val)	percpu_to_op("or", (pcp), val)
+#define irqsafe_cpu_or_2(pcp, val)	percpu_to_op("or", (pcp), val)
+#define irqsafe_cpu_or_4(pcp, val)	percpu_to_op("or", (pcp), val)
+#define irqsafe_cpu_xor_1(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define irqsafe_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define irqsafe_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
+
+/*
+ * Per cpu atomic 64 bit operations are only available under 64 bit.
+ * 32 bit must fall back to generic operations.
+ */
+#ifdef CONFIG_X86_64
+#define __this_cpu_read_8(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_write_8(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define __this_cpu_add_8(pcp, val)	percpu_to_op("add", (pcp), val)
+#define __this_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)
+#define __this_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
+#define __this_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
+
+#define this_cpu_read_8(pcp)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_write_8(pcp, val)	percpu_to_op("mov", (pcp), val)
+#define this_cpu_add_8(pcp, val)	percpu_to_op("add", (pcp), val)
+#define this_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)
+#define this_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
+#define this_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
+
+#define irqsafe_cpu_add_8(pcp, val)	percpu_to_op("add", (pcp), val)
+#define irqsafe_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)
+#define irqsafe_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
+#define irqsafe_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
+
+#endif
+
 /* This is not atomic against other CPUs -- CPU preemption needs to be off */
 #define x86_test_and_clear_bit_percpu(bit, var) \
 ({ \
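For context, a rough usage sketch of the accessors this hunk wires up. The generic this_cpu_*()/__this_cpu_*() wrappers in <linux/percpu.h> select the _1/_2/_4/_8 variant by sizeof() and land in percpu_to_op()/percpu_from_op(); on x86 each operation becomes a single segment-prefixed instruction, so it needs no explicit preemption disabling. The sketch uses the modern spelling of the per-cpu variable (kernels of this generation still carried the per_cpu__ prefix, visible in the percpu_or/percpu_xor context lines above), and demo_hits and the demo_* functions are invented names:

#include <linux/percpu.h>
#include <linux/preempt.h>

/* Made-up per-cpu counter, for illustration only. */
static DEFINE_PER_CPU(unsigned long, demo_hits);

static void demo_account_hit(void)
{
	/* On 64-bit x86 this compiles to one add instruction with a
	 * %gs-based address via percpu_to_op("add", ...); the single-insn
	 * read-modify-write is safe even if the task is preempted. */
	this_cpu_add(demo_hits, 1);
}

static unsigned long demo_read_local(void)
{
	unsigned long val;

	/* The __this_cpu_* forms skip the extra checks and assume the
	 * caller already keeps the task pinned to this CPU. */
	preempt_disable();
	val = __this_cpu_read(demo_hits);
	preempt_enable();

	return val;
}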