MIPS: make CPU_HAS_LOAD_STORE_LR opt-out
CPU_HAS_LOAD_STORE_LR was introduced in 932afdeec1 ("MIPS: Add Kconfig
variable for CPUs with unaligned load/store instructions") to make the
code in kernel/unaligned.c and lib/mem{cpy,set}.S more intuitive, and
to make it easy to add new CPUs lacking these instruction sets in the
future.
However, this variant is not optimal, for two main reasons:
* We currently have 20+ CPUs with these instructions and only two
  (MIPS R6) without them. It is clearly simpler and more effective to
  have an option for those two than for all the rest.
* When adding a new CPU, it is easy to miss the fact that this option
  must be selected, whereas the processors lacking these instruction
  sets are well known, so the probability of missing one is far lower.
We can address both points by turning CPU_HAS_LOAD_STORE_LR into the
opt-out CPU_NO_LOAD_STORE_LR. This also makes the MIPS root Kconfig
clearer and easier to follow; the resulting pattern is sketched below.
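The sketch below illustrates the opt-out pattern. It is illustrative
only: the help text and the select lists of the R6 CPU entries are
abridged here, not quoted verbatim from the tree.

    config CPU_NO_LOAD_STORE_LR
            bool
            help
              CPU lacks support for unaligned load and store
              instructions: LWL, LWR, SWL, SWR (load/store word
              left/right) and their 64-bit doubleword counterparts.

    # Only the two R6 targets opt out; no other CPU entry needs
    # to be touched when this symbol is absent.
    config CPU_MIPS32_R6
            bool "MIPS32 Release 6"
            select CPU_NO_LOAD_STORE_LR
            # ... remaining selects unchanged ...

    config CPU_MIPS64_R6
            bool "MIPS64 Release 6"
            select CPU_NO_LOAD_STORE_LR
            # ... remaining selects unchanged ...

The users in mem{cpy,set}.S and kernel/unaligned.c then simply invert
their guards: #ifdef CONFIG_CPU_HAS_LOAD_STORE_LR becomes
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR, as in the diff below.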
Signed-off-by: Alexander Lobakin <alobakin@dlink.ru>
Signed-off-by: Paul Burton <paulburton@kernel.org>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: Alexandre Belloni <alexandre.belloni@bootlin.com>
Cc: Microchip Linux Driver Support <UNGLinuxDriver@microchip.com>
Cc: Will Deacon <will@kernel.org>
Cc: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Cc: Masahiro Yamada <yamada.masahiro@socionext.com>
Cc: Paul Walmsley <paul.walmsley@sifive.com>
Cc: Michal Simek <michal.simek@xilinx.com>
Cc: Allison Randal <allison@lohutok.net>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Eric W. Biederman <ebiederm@xmission.com>
Cc: linux-mips@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
commit 18d84e2e55
parent 7de86604bb
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -115,7 +115,7 @@
 #endif
 	.set		reorder
 
-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
 	R10KCBARRIER(0(ra))
 #ifdef __MIPSEB__
 	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
@@ -125,7 +125,7 @@
 	PTR_SUBU	a0, t0			/* long align ptr */
 	PTR_ADDU	a2, t0			/* correct size */
 
-#else /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
 #define STORE_BYTE(N)				\
 	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
 	.set		noreorder;		\
@@ -150,7 +150,7 @@
 	ori		a0, STORMASK
 	xori		a0, STORMASK
 	PTR_ADDIU	a0, STORSIZE
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 1:	ori		t1, a2, 0x3f		/* # of full blocks */
 	xori		t1, 0x3f
 	andi		t0, a2, 0x40-STORSIZE
@@ -185,7 +185,7 @@
 
 	.set		noreorder
 	beqz		a2, 1f
-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
 	PTR_ADDU	a0, a2			/* What's left */
 	.set		reorder
 	R10KCBARRIER(0(ra))
@@ -194,7 +194,7 @@
 #else
 	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
 #endif
-#else
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
 	PTR_SUBU	t0, $0, a2
 	.set		reorder
 	move		a2, zero		/* No remaining longs */
@@ -211,7 +211,7 @@
 	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
 #endif
 0:
-#endif
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 1:	move		a2, zero
 	jr		ra
 
@@ -234,7 +234,7 @@
 	.hidden __memset
 	.endif
 
-#ifndef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
 .Lbyte_fixup\@:
 	/*
 	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
@@ -243,7 +243,7 @@
 	PTR_SUBU	a2, t0
 	PTR_ADDIU	a2, 1
 	jr		ra
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 
 .Lfirst_fixup\@:
 	/* unset_bytes already in a2 */