author	Matt Redfearn <matt.redfearn@mips.com>	2018-04-17 16:40:02 +0100
committer	James Hogan <jhogan@kernel.org>	2018-05-21 16:01:15 +0100
commit	21325631f395b1885a0979ac2eb9838ab2036526 (patch)
tree	eeffbbc280aa75a2a2f19066d3319582ba5d16a4 /arch/mips/lib
parent	84002c88599d6b537e54b003f763215be2075243 (diff)
MIPS: memset.S: Reinstate delay slot indentation
Assembly language within the MIPS kernel conventionally indents instructions which are in a branch delay slot to make them easier to see.

Commit 8483b14aaa81 ("MIPS: lib: memset: Whitespace fixes") rather inexplicably removed all of these indentations from memset.S.

Reinstate the convention for all instructions in a branch delay slot. This effectively reverts the above commit, plus other locations introduced with MIPSR6 support.

Signed-off-by: Matt Redfearn <matt.redfearn@mips.com>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/19111/
Signed-off-by: James Hogan <jhogan@kernel.org>
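For readers unfamiliar with the convention the patch restores, a minimal illustration follows (not part of the patch: the two instructions are taken from the first hunk below, and the /* ... */ comments here are explanatory additions, not source comments). On MIPS, the instruction immediately following a classic branch such as bnez occupies the branch delay slot and executes regardless of whether the branch is taken, so the kernel indents it by one extra space to make the pairing visible at a glance:

	bnez	t0, .Lsmall_memset\@	/* branch */
	 andi	t0, a0, STORMASK	/* delay slot: runs before the branch takes effect, indented one extra space */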
Diffstat (limited to 'arch/mips/lib')
-rw-r--r--	arch/mips/lib/memset.S	28
1 file changed, 14 insertions, 14 deletions
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index f7327979a8f8..1cc306520a55 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -95,7 +95,7 @@
sltiu t0, a2, STORSIZE /* very small region? */
bnez t0, .Lsmall_memset\@
-	andi t0, a0, STORMASK /* aligned? */
+	 andi t0, a0, STORMASK /* aligned? */
#ifdef CONFIG_CPU_MICROMIPS
move t8, a1 /* used by 'swp' instruction */
@@ -103,12 +103,12 @@
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
beqz t0, 1f
-	PTR_SUBU t0, STORSIZE /* alignment in bytes */
+	 PTR_SUBU t0, STORSIZE /* alignment in bytes */
#else
.set noat
li AT, STORSIZE
beqz t0, 1f
-	PTR_SUBU t0, AT /* alignment in bytes */
+	 PTR_SUBU t0, AT /* alignment in bytes */
.set at
#endif
@@ -149,7 +149,7 @@
1: ori t1, a2, 0x3f /* # of full blocks */
xori t1, 0x3f
beqz t1, .Lmemset_partial\@ /* no block to fill */
-	andi t0, a2, 0x40-STORSIZE
+	 andi t0, a2, 0x40-STORSIZE
PTR_ADDU t1, a0 /* end address */
.set reorder
@@ -174,7 +174,7 @@
.set at
#endif
jr t1
-	PTR_ADDU a0, t0 /* dest ptr */
+	 PTR_ADDU a0, t0 /* dest ptr */
.set push
.set noreorder
@@ -186,7 +186,7 @@
beqz a2, 1f
#ifndef CONFIG_CPU_MIPSR6
-	PTR_ADDU a0, a2 /* What's left */
+	 PTR_ADDU a0, a2 /* What's left */
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
@@ -194,7 +194,7 @@
EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
-	PTR_SUBU t0, $0, a2
+	 PTR_SUBU t0, $0, a2
PTR_ADDIU t0, 1
STORE_BYTE(0)
STORE_BYTE(1)
@@ -210,11 +210,11 @@
0:
#endif
1: jr ra
-	move a2, zero
+	 move a2, zero
.Lsmall_memset\@:
beqz a2, 2f
-	PTR_ADDU t1, a0, a2
+	 PTR_ADDU t1, a0, a2
1: PTR_ADDIU a0, 1 /* fill bytewise */
R10KCBARRIER(0(ra))
@@ -222,7 +222,7 @@
EX(sb, a1, -1(a0), .Lsmall_fixup\@)
2: jr ra /* done */
-	move a2, zero
+	 move a2, zero
.if __memset == 1
END(memset)
.set __memset, 0
@@ -238,7 +238,7 @@
.Lfirst_fixup\@:
jr ra
-	nop
+	 nop
.Lfwd_fixup\@:
PTR_L t0, TI_TASK($28)
@@ -246,7 +246,7 @@
LONG_L t0, THREAD_BUADDR(t0)
LONG_ADDU a2, t1
jr ra
-	LONG_SUBU a2, t0
+	 LONG_SUBU a2, t0
.Lpartial_fixup\@:
PTR_L t0, TI_TASK($28)
@@ -254,7 +254,7 @@
LONG_L t0, THREAD_BUADDR(t0)
LONG_ADDU a2, a0
jr ra
-	LONG_SUBU a2, t0
+	 LONG_SUBU a2, t0
.Llast_fixup\@:
jr ra
@@ -278,7 +278,7 @@
LEAF(memset)
EXPORT_SYMBOL(memset)
beqz a1, 1f
-	move v0, a0 /* result */
+	 move v0, a0 /* result */
andi a1, 0xff /* spread fillword */
LONG_SLL t1, a1, 8