author	James Hogan <james.hogan@imgtec.com>	2017-04-04 11:43:26 +0100
committer	James Hogan <james.hogan@imgtec.com>	2017-04-05 15:25:06 +0100
commit	fd40eee1290ad7add7aa665e3ce6b0f9fe9734b4 (patch)
tree	72a3184830909c77728191139d5aca764adafd70 /arch/metag
parent	563ddc1076109f2b3f88e6d355eab7b6fd4662cb (diff)
metag/usercopy: Set flags before ADDZ
The fixup code for the copy_to_user rapf loops reads TXStatus.LSM_STEP
to decide how far to rewind the source pointer. There is a special case
for the last execution of an MGETL/MGETD, since it leaves LSM_STEP=0
even though the number of MGETLs/MGETDs attempted was 4.

This uses ADDZ which is conditional upon the Z condition flag, but the
AND instruction which masked the TXStatus.LSM_STEP field didn't set the
condition flags based on the result.

Fix that now by using ANDS which does set the flags, and also marking
the condition codes as clobbered by the inline assembly.

Fixes: 373cd784d0fc ("metag: Memory handling")
Signed-off-by: James Hogan <james.hogan@imgtec.com>
Cc: linux-metag@vger.kernel.org
Cc: stable@vger.kernel.org
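As a rough, hypothetical C sketch (not kernel code) of the arithmetic the
fixed sequence performs, mirroring the LSR #8 / ANDS #0x7 / ADDZ #4 /
SUB #1 instructions in the hunks below; the bit position and field width
are taken from those instructions rather than from any hardware manual:

#include <stdio.h>

/* Hypothetical illustration only: the count the fixup derives from
 * TXStatus.LSM_STEP before later (elided) instructions turn it into a
 * byte rewind of the source pointer. */
static unsigned int lsm_step_count(unsigned int txstatus)
{
	unsigned int step = (txstatus >> 8) & 0x7;	/* LSR #8; ANDS #0x7 */

	/* ADDZ special case: the final MGETL/MGETD of a burst leaves
	 * LSM_STEP at 0 even though 4 transfers were attempted, so zero
	 * really means 4.  ADDZ tests the Z flag, which plain AND never
	 * set; that is why the patch switches to ANDS. */
	if (step == 0)
		step += 4;

	return step - 1;				/* SUB #1 */
}

int main(void)
{
	printf("%u\n", lsm_step_count(0));	/* 0 -> 4 attempted -> 3 */
	return 0;
}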
Diffstat (limited to 'arch/metag')
-rw-r--r--	arch/metag/lib/usercopy.c	8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/metag/lib/usercopy.c b/arch/metag/lib/usercopy.c
index e1d553872fd7..4422928a1746 100644
--- a/arch/metag/lib/usercopy.c
+++ b/arch/metag/lib/usercopy.c
@@ -315,7 +315,7 @@
" .previous\n" \
: "=r" (to), "=r" (from), "=r" (ret), "=d" (n) \
: "0" (to), "1" (from), "2" (ret), "3" (n) \
- : "D1Ar1", "D0Ar2", "memory")
+ : "D1Ar1", "D0Ar2", "cc", "memory")
/* rewind 'to' and 'from' pointers when a fault occurs
*
@@ -341,7 +341,7 @@
#define __asm_copy_to_user_64bit_rapf_loop(to, from, ret, n, id)\
__asm_copy_user_64bit_rapf_loop(to, from, ret, n, id, \
"LSR D0Ar2, D0Ar2, #8\n" \
- "AND D0Ar2, D0Ar2, #0x7\n" \
+ "ANDS D0Ar2, D0Ar2, #0x7\n" \
"ADDZ D0Ar2, D0Ar2, #4\n" \
"SUB D0Ar2, D0Ar2, #1\n" \
"MOV D1Ar1, #4\n" \
@@ -486,7 +486,7 @@
" .previous\n" \
: "=r" (to), "=r" (from), "=r" (ret), "=d" (n) \
: "0" (to), "1" (from), "2" (ret), "3" (n) \
- : "D1Ar1", "D0Ar2", "memory")
+ : "D1Ar1", "D0Ar2", "cc", "memory")
/* rewind 'to' and 'from' pointers when a fault occurs
*
@@ -512,7 +512,7 @@
#define __asm_copy_to_user_32bit_rapf_loop(to, from, ret, n, id)\
__asm_copy_user_32bit_rapf_loop(to, from, ret, n, id, \
"LSR D0Ar2, D0Ar2, #8\n" \
- "AND D0Ar2, D0Ar2, #0x7\n" \
+ "ANDS D0Ar2, D0Ar2, #0x7\n" \
"ADDZ D0Ar2, D0Ar2, #4\n" \
"SUB D0Ar2, D0Ar2, #1\n" \
"MOV D1Ar1, #4\n" \