author     Roman Zippel <zippel@linux-m68k.org>       2006-06-23 02:05:00 -0700
committer  Linus Torvalds <torvalds@g5.osdl.org>      2006-06-23 07:43:01 -0700
commit     d94af931af42152e34539dd4782b1724084a89fb
tree       7cf8c1cce891ef6b87635a643fe62ea9d231b474
parent     1a23989475846547e5b7ce14e77f072894aaff54
[PATCH] m68k: clean up uaccess.h
This uninlines a few large functions in uaccess.h and cleans up the rest.
It includes a (hopefully temporary) workaround for the broken typeof in
gcc-4.1.
Signed-off-by: Roman Zippel <zippel@linux-m68k.org>
Cc: Geert Uytterhoeven <geert@linux-m68k.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
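
The bulk of the change is mechanical: the large copy helpers move out of the header into arch/m68k/lib/uaccess.c and are exported, while the header keeps only declarations. A minimal sketch of that pattern, with stand-in bodies (the real definitions are the hand-written 68k "moves" loops with __ex_table fixups in the diff below):

```c
/* include/asm-m68k/uaccess.h: declarations only, no large inline bodies. */
unsigned long __generic_copy_from_user(void *to, const void __user *from,
				       unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from,
				     unsigned long n);

/* arch/m68k/lib/uaccess.c: one out-of-line definition per helper, exported
 * so that modules using copy_from_user()/copy_to_user() can still link.
 */
#include <linux/module.h>
#include <asm/uaccess.h>

unsigned long __generic_copy_from_user(void *to, const void __user *from,
				       unsigned long n)
{
	/*
	 * Stand-in body for illustration only.  The real function is the
	 * moves.l/moves.w/moves.b asm loop from the diff, which zero-fills
	 * the destination on a fault and returns the number of bytes that
	 * could not be copied (0 on success); this placeholder just reports
	 * "nothing copied".
	 */
	return n;
}
EXPORT_SYMBOL(__generic_copy_from_user);
```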
-rw-r--r--  arch/m68k/lib/Makefile     |    4
-rw-r--r--  arch/m68k/lib/uaccess.c    |  222
-rw-r--r--  include/asm-m68k/uaccess.h | 1084
3 files changed, 495 insertions(+), 815 deletions(-)
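
A side effect of the gcc-4.1 typeof workaround is that the 8-byte case of __get_user() is commented out in the new header, so a 64-bit get_user() now falls through to __get_user_bad() and fails at link time. A hedged sketch of the fallback a caller can use in the meantime (read_u64_from_user() and uptr are illustrative names, not part of this patch):

```c
#include <linux/types.h>
#include <linux/errno.h>
#include <asm/uaccess.h>

/* Illustrative helper: fetch a 64-bit value from user space without
 * relying on the currently disabled 8-byte __get_user() case.
 */
static int read_u64_from_user(const u64 __user *uptr, u64 *val)
{
	/*
	 * get_user(*val, uptr) would hit the default branch of the new
	 * __get_user() switch and resolve to __get_user_bad(), i.e. a
	 * link-time error, so go through copy_from_user() instead.
	 */
	if (copy_from_user(val, uptr, sizeof(*val)))
		return -EFAULT;

	return 0;
}
```

put_user() is unaffected: its 8-byte case is implemented inline in the new header with a two-instruction moves sequence.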
diff --git a/arch/m68k/lib/Makefile b/arch/m68k/lib/Makefile index ebe51a513817..6bbf19f96007 100644 --- a/arch/m68k/lib/Makefile +++ b/arch/m68k/lib/Makefile @@ -4,5 +4,5 @@ EXTRA_AFLAGS := -traditional -lib-y := ashldi3.o ashrdi3.o lshrdi3.o muldi3.o \ - checksum.o string.o semaphore.o +lib-y := ashldi3.o ashrdi3.o lshrdi3.o muldi3.o \ + checksum.o string.o semaphore.o uaccess.o diff --git a/arch/m68k/lib/uaccess.c b/arch/m68k/lib/uaccess.c new file mode 100644 index 000000000000..1bc188c0d983 --- /dev/null +++ b/arch/m68k/lib/uaccess.c @@ -0,0 +1,222 @@ +/* + * This file is subject to the terms and conditions of the GNU General Public + * License. See the file COPYING in the main directory of this archive + * for more details. + */ + +#include <linux/module.h> +#include <asm/uaccess.h> + +unsigned long __generic_copy_from_user(void *to, const void __user *from, + unsigned long n) +{ + unsigned long tmp, res; + + asm volatile ("\n" + " tst.l %0\n" + " jeq 2f\n" + "1: moves.l (%1)+,%3\n" + " move.l %3,(%2)+\n" + " subq.l #1,%0\n" + " jne 1b\n" + "2: btst #1,%5\n" + " jeq 4f\n" + "3: moves.w (%1)+,%3\n" + " move.w %3,(%2)+\n" + "4: btst #0,%5\n" + " jeq 6f\n" + "5: moves.b (%1)+,%3\n" + " move.b %3,(%2)+\n" + "6:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "10: move.l %0,%3\n" + "7: clr.l (%2)+\n" + " subq.l #1,%3\n" + " jne 7b\n" + " lsl.l #2,%0\n" + " btst #1,%5\n" + " jeq 8f\n" + "30: clr.w (%2)+\n" + " addq.l #2,%0\n" + "8: btst #0,%5\n" + " jeq 6b\n" + "50: clr.b (%2)+\n" + " addq.l #1,%0\n" + " jra 6b\n" + " .previous\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 1b,10b\n" + " .long 3b,30b\n" + " .long 5b,50b\n" + " .previous" + : "=d" (res), "+a" (from), "+a" (to), "=&r" (tmp) + : "0" (n / 4), "d" (n & 3)); + + return res; +} +EXPORT_SYMBOL(__generic_copy_from_user); + +unsigned long __generic_copy_to_user(void __user *to, const void *from, + unsigned long n) +{ + unsigned long tmp, res; + + asm volatile ("\n" + " tst.l %0\n" + " jeq 4f\n" + "1: move.l (%1)+,%3\n" + "2: moves.l %3,(%2)+\n" + "3: subq.l #1,%0\n" + " jne 1b\n" + "4: btst #1,%5\n" + " jeq 6f\n" + " move.w (%1)+,%3\n" + "5: moves.w %3,(%2)+\n" + "6: btst #0,%5\n" + " jeq 8f\n" + " move.b (%1)+,%3\n" + "7: moves.b %3,(%2)+\n" + "8:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "20: lsl.l #2,%0\n" + "50: add.l %5,%0\n" + " jra 7b\n" + " .previous\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 2b,20b\n" + " .long 3b,20b\n" + " .long 5b,50b\n" + " .long 6b,50b\n" + " .long 7b,50b\n" + " .long 8b,50b\n" + " .previous" + : "=d" (res), "+a" (from), "+a" (to), "=&r" (tmp) + : "0" (n / 4), "d" (n & 3)); + + return res; +} +EXPORT_SYMBOL(__generic_copy_to_user); + +/* + * Copy a null terminated string from userspace. 
+ */ +long strncpy_from_user(char *dst, const char __user *src, long count) +{ + long res; + char c; + + if (count <= 0) + return count; + + asm volatile ("\n" + "1: moves.b (%2)+,%4\n" + " move.b %4,(%1)+\n" + " jeq 2f\n" + " subq.l #1,%3\n" + " jne 1b\n" + "2: sub.l %3,%0\n" + "3:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "10: move.l %5,%0\n" + " jra 3b\n" + " .previous\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 1b,10b\n" + " .previous" + : "=d" (res), "+a" (dst), "+a" (src), "+r" (count), "=&d" (c) + : "i" (-EFAULT), "0" (count)); + + return res; +} +EXPORT_SYMBOL(strncpy_from_user); + +/* + * Return the size of a string (including the ending 0) + * + * Return 0 on exception, a value greater than N if too long + */ +long strnlen_user(const char __user *src, long n) +{ + char c; + long res; + + asm volatile ("\n" + "1: subq.l #1,%1\n" + " jmi 3f\n" + "2: moves.b (%0)+,%2\n" + " tst.b %2\n" + " jne 1b\n" + " jra 4f\n" + "\n" + "3: addq.l #1,%0\n" + "4: sub.l %4,%0\n" + "5:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "20: sub.l %0,%0\n" + " jra 5b\n" + " .previous\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 2b,20b\n" + " .previous\n" + : "=&a" (res), "+d" (n), "=&d" (c) + : "0" (src), "r" (src)); + + return res; +} +EXPORT_SYMBOL(strnlen_user); + +/* + * Zero Userspace + */ + +unsigned long clear_user(void __user *to, unsigned long n) +{ + unsigned long res; + + asm volatile ("\n" + " tst.l %0\n" + " jeq 3f\n" + "1: moves.l %2,(%1)+\n" + "2: subq.l #1,%0\n" + " jne 1b\n" + "3: btst #1,%4\n" + " jeq 5f\n" + "4: moves.w %2,(%1)+\n" + "5: btst #0,%4\n" + " jeq 7f\n" + "6: moves.b %2,(%1)\n" + "7:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "10: lsl.l #2,%0\n" + "40: add.l %4,%0\n" + " jra 7b\n" + " .previous\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 1b,10b\n" + " .long 2b,10b\n" + " .long 4b,40b\n" + " .long 5b,40b\n" + " .long 6b,40b\n" + " .long 7b,40b\n" + " .previous" + : "=d" (res), "+a" (to) + : "r" (0), "0" (n / 4), "d" (n & 3)); + + return res; +} +EXPORT_SYMBOL(clear_user); diff --git a/include/asm-m68k/uaccess.h b/include/asm-m68k/uaccess.h index 2ffd87b0a769..b761ef218cea 100644 --- a/include/asm-m68k/uaccess.h +++ b/include/asm-m68k/uaccess.h @@ -4,8 +4,9 @@ /* * User space memory access functions */ +#include <linux/compiler.h> #include <linux/errno.h> -#include <linux/sched.h> +#include <linux/types.h> #include <asm/segment.h> #define VERIFY_READ 0 @@ -32,858 +33,315 @@ struct exception_table_entry unsigned long insn, fixup; }; +extern int __put_user_bad(void); +extern int __get_user_bad(void); + +#define __put_user_asm(res, x, ptr, bwl, reg, err) \ +asm volatile ("\n" \ + "1: moves."#bwl" %2,%1\n" \ + "2:\n" \ + " .section .fixup,\"ax\"\n" \ + " .even\n" \ + "10: moveq.l %3,%0\n" \ + " jra 2b\n" \ + " .previous\n" \ + "\n" \ + " .section __ex_table,\"a\"\n" \ + " .align 4\n" \ + " .long 1b,10b\n" \ + " .long 2b,10b\n" \ + " .previous" \ + : "+d" (res), "=m" (*(ptr)) \ + : #reg (x), "i" (err)) /* * These are the main single-value transfer routines. They automatically * use the right size if we just have the right pointer type. 
*/ -#define put_user(x, ptr) \ -({ \ - int __pu_err; \ - typeof(*(ptr)) __pu_val = (x); \ - __chk_user_ptr(ptr); \ - switch (sizeof (*(ptr))) { \ - case 1: \ - __put_user_asm(__pu_err, __pu_val, ptr, b); \ - break; \ - case 2: \ - __put_user_asm(__pu_err, __pu_val, ptr, w); \ - break; \ - case 4: \ - __put_user_asm(__pu_err, __pu_val, ptr, l); \ - break; \ - case 8: \ - __pu_err = __constant_copy_to_user(ptr, &__pu_val, 8); \ - break; \ - default: \ - __pu_err = __put_user_bad(); \ - break; \ - } \ - __pu_err; \ +#define __put_user(x, ptr) \ +({ \ + typeof(*(ptr)) __pu_val = (x); \ + int __pu_err = 0; \ + __chk_user_ptr(ptr); \ + switch (sizeof (*(ptr))) { \ + case 1: \ + __put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \ + break; \ + case 2: \ + __put_user_asm(__pu_err, __pu_val, ptr, w, d, -EFAULT); \ + break; \ + case 4: \ + __put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \ + break; \ + case 8: \ + { \ + const void *__pu_ptr = (ptr); \ + asm volatile ("\n" \ + "1: moves.l %2,(%1)+\n" \ + "2: moves.l %R2,(%1)\n" \ + "3:\n" \ + " .section .fixup,\"ax\"\n" \ + " .even\n" \ + "10: movel %3,%0\n" \ + " jra 3b\n" \ + " .previous\n" \ + "\n" \ + " .section __ex_table,\"a\"\n" \ + " .align 4\n" \ + " .long 1b,10b\n" \ + " .long 2b,10b\n" \ + " .long 3b,10b\n" \ + " .previous" \ + : "+d" (__pu_err), "+a" (__pu_ptr) \ + : "r" (__pu_val), "i" (-EFAULT) \ + : "memory"); \ + break; \ + } \ + default: \ + __pu_err = __put_user_bad(); \ + break; \ + } \ + __pu_err; \ }) -#define __put_user(x, ptr) put_user(x, ptr) +#define put_user(x, ptr) __put_user(x, ptr) -extern int __put_user_bad(void); -/* - * Tell gcc we read from memory instead of writing: this is because - * we do not write to any memory gcc knows about, so there are no - * aliasing issues. 
- */ -#define __put_user_asm(err,x,ptr,bwl) \ -__asm__ __volatile__ \ - ("21:moves" #bwl " %2,%1\n" \ - "1:\n" \ - ".section .fixup,\"ax\"\n" \ - " .even\n" \ - "2: movel %3,%0\n" \ - " jra 1b\n" \ - ".previous\n" \ - ".section __ex_table,\"a\"\n" \ - " .align 4\n" \ - " .long 21b,2b\n" \ - " .long 1b,2b\n" \ - ".previous" \ - : "=d"(err) \ - : "m"(*(ptr)), "r"(x), "i"(-EFAULT), "0"(0)) - -#define get_user(x, ptr) \ -({ \ - int __gu_err; \ - typeof(*(ptr)) __gu_val; \ - __chk_user_ptr(ptr); \ - switch (sizeof(*(ptr))) { \ - case 1: \ - __get_user_asm(__gu_err, __gu_val, ptr, b, "=d"); \ - break; \ - case 2: \ - __get_user_asm(__gu_err, __gu_val, ptr, w, "=r"); \ - break; \ - case 4: \ - __get_user_asm(__gu_err, __gu_val, ptr, l, "=r"); \ - break; \ - case 8: \ - __gu_err = __constant_copy_from_user(&__gu_val, ptr, 8); \ - break; \ - default: \ - __gu_val = (typeof(*(ptr)))0; \ - __gu_err = __get_user_bad(); \ - break; \ - } \ - (x) = __gu_val; \ - __gu_err; \ +#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({ \ + type __gu_val; \ + asm volatile ("\n" \ + "1: moves."#bwl" %2,%1\n" \ + "2:\n" \ + " .section .fixup,\"ax\"\n" \ + " .even\n" \ + "10: move.l %3,%0\n" \ + " sub."#bwl" %1,%1\n" \ + " jra 2b\n" \ + " .previous\n" \ + "\n" \ + " .section __ex_table,\"a\"\n" \ + " .align 4\n" \ + " .long 1b,10b\n" \ + " .previous" \ + : "+d" (res), "=&" #reg (__gu_val) \ + : "m" (*(ptr)), "i" (err)); \ + (x) = (typeof(*(ptr)))(long)__gu_val; \ }) -#define __get_user(x, ptr) get_user(x, ptr) -extern int __get_user_bad(void); +#define __get_user(x, ptr) \ +({ \ + int __gu_err = 0; \ + __chk_user_ptr(ptr); \ + switch (sizeof(*(ptr))) { \ + case 1: \ + __get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT); \ + break; \ + case 2: \ + __get_user_asm(__gu_err, x, ptr, u16, w, d, -EFAULT); \ + break; \ + case 4: \ + __get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT); \ + break; \ +/* case 8: disabled because gcc-4.1 has a broken typeof \ + { \ + const void *__gu_ptr = (ptr); \ + u64 __gu_val; \ + asm volatile ("\n" \ + "1: moves.l (%2)+,%1\n" \ + "2: moves.l (%2),%R1\n" \ + "3:\n" \ + " .section .fixup,\"ax\"\n" \ + " .even\n" \ + "10: move.l %3,%0\n" \ + " sub.l %1,%1\n" \ + " sub.l %R1,%R1\n" \ + " jra 3b\n" \ + " .previous\n" \ + "\n" \ + " .section __ex_table,\"a\"\n" \ + " .align 4\n" \ + " .long 1b,10b\n" \ + " .long 2b,10b\n" \ + " .previous" \ + : "+d" (__gu_err), "=&r" (__gu_val), \ + "+a" (__gu_ptr) \ + : "i" (-EFAULT) \ + : "memory"); \ + (x) = (typeof(*(ptr)))__gu_val; \ + break; \ + } */ \ + default: \ + __gu_err = __get_user_bad(); \ + break; \ + } \ + __gu_err; \ +}) +#define get_user(x, ptr) __get_user(x, ptr) -#define __get_user_asm(err,x,ptr,bwl,reg) \ -__asm__ __volatile__ \ - ("1: moves" #bwl " %2,%1\n" \ - "2:\n" \ - ".section .fixup,\"ax\"\n" \ - " .even\n" \ - "3: movel %3,%0\n" \ - " sub" #bwl " %1,%1\n" \ - " jra 2b\n" \ - ".previous\n" \ - ".section __ex_table,\"a\"\n" \ - " .align 4\n" \ - " .long 1b,3b\n" \ - ".previous" \ - : "=d"(err), reg(x) \ - : "m"(*(ptr)), "i" (-EFAULT), "0"(0)) +unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n); +unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n); -static inline unsigned long -__generic_copy_from_user(void *to, const void __user *from, unsigned long n) -{ - unsigned long tmp; - __asm__ __volatile__ - (" tstl %2\n" - " jeq 2f\n" - "1: movesl (%1)+,%3\n" - " movel %3,(%0)+\n" - " subql #1,%2\n" - " jne 1b\n" - "2: movel %4,%2\n" - " bclr #1,%2\n" - " jeq 
4f\n" - "3: movesw (%1)+,%3\n" - " movew %3,(%0)+\n" - "4: bclr #0,%2\n" - " jeq 6f\n" - "5: movesb (%1)+,%3\n" - " moveb %3,(%0)+\n" - "6:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "7: movel %2,%%d0\n" - "71:clrl (%0)+\n" - " subql #1,%%d0\n" - " jne 71b\n" - " lsll #2,%2\n" - " addl %4,%2\n" - " btst #1,%4\n" - " jne 81f\n" - " btst #0,%4\n" - " jne 91f\n" - " jra 6b\n" - "8: addql #2,%2\n" - "81:clrw (%0)+\n" - " btst #0,%4\n" - " jne 91f\n" - " jra 6b\n" - "9: addql #1,%2\n" - "91:clrb (%0)+\n" - " jra 6b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,7b\n" - " .long 3b,8b\n" - " .long 5b,9b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp) - : "d"(n & 3), "0"(to), "1"(from), "2"(n/4) - : "d0", "memory"); - return n; -} - -static inline unsigned long -__generic_copy_to_user(void __user *to, const void *from, unsigned long n) +static __always_inline unsigned long +__constant_copy_from_user(void *to, const void __user *from, unsigned long n) { - unsigned long tmp; - __asm__ __volatile__ - (" tstl %2\n" - " jeq 3f\n" - "1: movel (%1)+,%3\n" - "22:movesl %3,(%0)+\n" - "2: subql #1,%2\n" - " jne 1b\n" - "3: movel %4,%2\n" - " bclr #1,%2\n" - " jeq 4f\n" - " movew (%1)+,%3\n" - "24:movesw %3,(%0)+\n" - "4: bclr #0,%2\n" - " jeq 5f\n" - " moveb (%1)+,%3\n" - "25:movesb %3,(%0)+\n" - "5:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "60:addql #1,%2\n" - "6: lsll #2,%2\n" - " addl %4,%2\n" - " jra 5b\n" - "7: addql #2,%2\n" - " jra 5b\n" - "8: addql #1,%2\n" - " jra 5b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,60b\n" - " .long 22b,6b\n" - " .long 2b,6b\n" - " .long 24b,7b\n" - " .long 3b,60b\n" - " .long 4b,7b\n" - " .long 25b,8b\n" - " .long 5b,8b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp) - : "r"(n & 3), "0"(to), "1"(from), "2"(n / 4) - : "memory"); - return n; -} + unsigned long res = 0, tmp; -#define __copy_from_user_big(to, from, n, fixup, copy) \ - __asm__ __volatile__ \ - ("10: movesl (%1)+,%%d0\n" \ - " movel %%d0,(%0)+\n" \ - " subql #1,%2\n" \ - " jne 10b\n" \ - ".section .fixup,\"ax\"\n" \ - " .even\n" \ - "11: movel %2,%%d0\n" \ - "13: clrl (%0)+\n" \ - " subql #1,%%d0\n" \ - " jne 13b\n" \ - " lsll #2,%2\n" \ - fixup "\n" \ - " jra 12f\n" \ - ".previous\n" \ - ".section __ex_table,\"a\"\n" \ - " .align 4\n" \ - " .long 10b,11b\n" \ - ".previous\n" \ - copy "\n" \ - "12:" \ - : "=a"(to), "=a"(from), "=d"(n) \ - : "0"(to), "1"(from), "2"(n/4) \ - : "d0", "memory") + /* limit the inlined version to 3 moves */ + if (n == 11 || n > 12) + return __generic_copy_from_user(to, from, n); -static inline unsigned long -__constant_copy_from_user(void *to, const void __user *from, unsigned long n) -{ - switch (n) { - case 0: - break; - case 1: - __asm__ __volatile__ - ("1: movesb (%1)+,%%d0\n" - " moveb %%d0,(%0)+\n" - "2:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "3: addql #1,%2\n" - " clrb (%0)+\n" - " jra 2b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,3b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 2: - __asm__ __volatile__ - ("1: movesw (%1)+,%%d0\n" - " movew %%d0,(%0)+\n" - "2:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "3: addql #2,%2\n" - " clrw (%0)+\n" - " jra 2b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,3b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 
3: - __asm__ __volatile__ - ("1: movesw (%1)+,%%d0\n" - " movew %%d0,(%0)+\n" - "2: movesb (%1)+,%%d0\n" - " moveb %%d0,(%0)+\n" - "3:" - ".section .fixup,\"ax\"\n" - " .even\n" - "4: addql #2,%2\n" - " clrw (%0)+\n" - "5: addql #1,%2\n" - " clrb (%0)+\n" - " jra 3b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,4b\n" - " .long 2b,5b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 4: - __asm__ __volatile__ - ("1: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "2:" - ".section .fixup,\"ax\"\n" - " .even\n" - "3: addql #4,%2\n" - " clrl (%0)+\n" - " jra 2b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,3b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 8: - __asm__ __volatile__ - ("1: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "2: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "3:" - ".section .fixup,\"ax\"\n" - " .even\n" - "4: addql #4,%2\n" - " clrl (%0)+\n" - "5: addql #4,%2\n" - " clrl (%0)+\n" - " jra 3b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,4b\n" - " .long 2b,5b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 12: - __asm__ __volatile__ - ("1: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "2: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "3: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "4:" - ".section .fixup,\"ax\"\n" - " .even\n" - "5: addql #4,%2\n" - " clrl (%0)+\n" - "6: addql #4,%2\n" - " clrl (%0)+\n" - "7: addql #4,%2\n" - " clrl (%0)+\n" - " jra 4b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,5b\n" - " .long 2b,6b\n" - " .long 3b,7b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 16: - __asm__ __volatile__ - ("1: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "2: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "3: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "4: movesl (%1)+,%%d0\n" - " movel %%d0,(%0)+\n" - "5:" - ".section .fixup,\"ax\"\n" - " .even\n" - "6: addql #4,%2\n" - " clrl (%0)+\n" - "7: addql #4,%2\n" - " clrl (%0)+\n" - "8: addql #4,%2\n" - " clrl (%0)+\n" - "9: addql #4,%2\n" - " clrl (%0)+\n" - " jra 5b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,6b\n" - " .long 2b,7b\n" - " .long 3b,8b\n" - " .long 4b,9b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - default: - switch (n & 3) { - case 0: - __copy_from_user_big(to, from, n, "", ""); - break; + switch (n) { case 1: - __copy_from_user_big(to, from, n, - /* fixup */ - "1: addql #1,%2\n" - " clrb (%0)+", - /* copy */ - "2: movesb (%1)+,%%d0\n" - " moveb %%d0,(%0)+\n" - ".section __ex_table,\"a\"\n" - " .long 2b,1b\n" - ".previous"); - break; + __get_user_asm(res, *(u8 *)to, (u8 *)from, u8, b, d, 1); + return res; case 2: - __copy_from_user_big(to, from, n, - /* fixup */ - "1: addql #2,%2\n" - " clrw (%0)+", - /* copy */ - "2: movesw (%1)+,%%d0\n" - " movew %%d0,(%0)+\n" - ".section __ex_table,\"a\"\n" - " .long 2b,1b\n" - ".previous"); - break; - case 3: - __copy_from_user_big(to, from, n, - /* fixup */ - "1: addql #2,%2\n" - " clrw (%0)+\n" - "2: addql #1,%2\n" - " clrb (%0)+", - /* copy */ - "3: movesw (%1)+,%%d0\n" - " movew %%d0,(%0)+\n" - "4: movesb (%1)+,%%d0\n" - " moveb 
%%d0,(%0)+\n" - ".section __ex_table,\"a\"\n" - " .long 3b,1b\n" - " .long 4b,2b\n" - ".previous"); - break; + __get_user_asm(res, *(u16 *)to, (u16 *)from, u16, w, d, 2); + return res; + case 4: + __get_user_asm(res, *(u32 *)to, (u32 *)from, u32, l, r, 4); + return res; } - break; - } - return n; -} -#define __copy_to_user_big(to, from, n, fixup, copy) \ - __asm__ __volatile__ \ - ("10: movel (%1)+,%%d0\n" \ - "31: movesl %%d0,(%0)+\n" \ - "11: subql #1,%2\n" \ - " jne 10b\n" \ - "41:\n" \ - ".section .fixup,\"ax\"\n" \ - " .even\n" \ - "22: addql #1,%2\n" \ - "12: lsll #2,%2\n" \ - fixup "\n" \ - " jra 13f\n" \ - ".previous\n" \ - ".section __ex_table,\"a\"\n" \ - " .align 4\n" \ - " .long 10b,22b\n" \ - " .long 31b,12b\n" \ - " .long 11b,12b\n" \ - " .long 41b,22b\n" \ - ".previous\n" \ - copy "\n" \ - "13:" \ - : "=a"(to), "=a"(from), "=d"(n) \ - : "0"(to), "1"(from), "2"(n/4) \ - : "d0", "memory") + asm volatile ("\n" + " .ifndef .Lfrom_user\n" + " .set .Lfrom_user,1\n" + " .macro copy_from_user to,from,tmp\n" + " .if .Lcnt >= 4\n" + "1: moves.l (\\from)+,\\tmp\n" + " move.l \\tmp,(\\to)+\n" + " .set .Lcnt,.Lcnt-4\n" + " .elseif .Lcnt & 2\n" + "1: moves.w (\\from)+,\\tmp\n" + " move.w \\tmp,(\\to)+\n" + " .set .Lcnt,.Lcnt-2\n" + " .elseif .Lcnt & 1\n" + "1: moves.b (\\from)+,\\tmp\n" + " move.b \\tmp,(\\to)+\n" + " .set .Lcnt,.Lcnt-1\n" + " .else\n" + " .exitm\n" + " .endif\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 1b,3f\n" + " .previous\n" + " .endm\n" + " .endif\n" + "\n" + " .set .Lcnt,%c4\n" + " copy_from_user %1,%2,%3\n" + " copy_from_user %1,%2,%3\n" + " copy_from_user %1,%2,%3\n" + "2:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "3: moveq.l %4,%0\n" + " move.l %5,%1\n" + " .rept %c4 / 4\n" + " clr.l (%1)+\n" + " .endr\n" + " .if %c4 & 2\n" + " clr.w (%1)+\n" + " .endif\n" + " .if %c4 & 1\n" + " clr.b (%1)+\n" + " .endif\n" + " jra 2b\n" + " .previous\n" + : "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp) + : "i" (n), "g" (to) + : "memory"); -#define __copy_to_user_inatomic __copy_to_user -#define __copy_from_user_inatomic __copy_from_user + return res; +} -static inline unsigned long +static __always_inline unsigned long __constant_copy_to_user(void __user *to, const void *from, unsigned long n) { - switch (n) { - case 0: - break; - case 1: - __asm__ __volatile__ - (" moveb (%1)+,%%d0\n" - "21:movesb %%d0,(%0)+\n" - "1:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "2: addql #1,%2\n" - " jra 1b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n " - " .long 21b,2b\n" - " .long 1b,2b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 2: - __asm__ __volatile__ - (" movew (%1)+,%%d0\n" - "21:movesw %%d0,(%0)+\n" - "1:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "2: addql #2,%2\n" - " jra 1b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 21b,2b\n" - " .long 1b,2b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 3: - __asm__ __volatile__ - (" movew (%1)+,%%d0\n" - "21:movesw %%d0,(%0)+\n" - "1: moveb (%1)+,%%d0\n" - "22:movesb %%d0,(%0)+\n" - "2:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "3: addql #2,%2\n" - "4: addql #1,%2\n" - " jra 2b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 21b,3b\n" - " .long 1b,3b\n" - " .long 22b,4b\n" - " .long 2b,4b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : 
"d0", "memory"); - break; - case 4: - __asm__ __volatile__ - (" movel (%1)+,%%d0\n" - "21:movesl %%d0,(%0)+\n" - "1:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "2: addql #4,%2\n" - " jra 1b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 21b,2b\n" - " .long 1b,2b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 8: - __asm__ __volatile__ - (" movel (%1)+,%%d0\n" - "21:movesl %%d0,(%0)+\n" - "1: movel (%1)+,%%d0\n" - "22:movesl %%d0,(%0)+\n" - "2:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "3: addql #4,%2\n" - "4: addql #4,%2\n" - " jra 2b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 21b,3b\n" - " .long 1b,3b\n" - " .long 22b,4b\n" - " .long 2b,4b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 12: - __asm__ __volatile__ - (" movel (%1)+,%%d0\n" - "21:movesl %%d0,(%0)+\n" - "1: movel (%1)+,%%d0\n" - "22:movesl %%d0,(%0)+\n" - "2: movel (%1)+,%%d0\n" - "23:movesl %%d0,(%0)+\n" - "3:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "4: addql #4,%2\n" - "5: addql #4,%2\n" - "6: addql #4,%2\n" - " jra 3b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 21b,4b\n" - " .long 1b,4b\n" - " .long 22b,5b\n" - " .long 2b,5b\n" - " .long 23b,6b\n" - " .long 3b,6b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - case 16: - __asm__ __volatile__ - (" movel (%1)+,%%d0\n" - "21:movesl %%d0,(%0)+\n" - "1: movel (%1)+,%%d0\n" - "22:movesl %%d0,(%0)+\n" - "2: movel (%1)+,%%d0\n" - "23:movesl %%d0,(%0)+\n" - "3: movel (%1)+,%%d0\n" - "24:movesl %%d0,(%0)+\n" - "4:" - ".section .fixup,\"ax\"\n" - " .even\n" - "5: addql #4,%2\n" - "6: addql #4,%2\n" - "7: addql #4,%2\n" - "8: addql #4,%2\n" - " jra 4b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 21b,5b\n" - " .long 1b,5b\n" - " .long 22b,6b\n" - " .long 2b,6b\n" - " .long 23b,7b\n" - " .long 3b,7b\n" - " .long 24b,8b\n" - " .long 4b,8b\n" - ".previous" - : "=a"(to), "=a"(from), "=d"(n) - : "0"(to), "1"(from), "2"(0) - : "d0", "memory"); - break; - default: - switch (n & 3) { - case 0: - __copy_to_user_big(to, from, n, "", ""); - break; + unsigned long res = 0, tmp; + + /* limit the inlined version to 3 moves */ + if (n == 11 || n > 12) + return __generic_copy_to_user(to, from, n); + + switch (n) { case 1: - __copy_to_user_big(to, from, n, - /* fixup */ - "1: addql #1,%2", - /* copy */ - " moveb (%1)+,%%d0\n" - "22:movesb %%d0,(%0)+\n" - "2:" - ".section __ex_table,\"a\"\n" - " .long 22b,1b\n" - " .long 2b,1b\n" - ".previous"); - break; + __put_user_asm(res, *(u8 *)from, (u8 *)to, b, d, 1); + return res; case 2: - __copy_to_user_big(to, from, n, - /* fixup */ - "1: addql #2,%2", - /* copy */ - " movew (%1)+,%%d0\n" - "22:movesw %%d0,(%0)+\n" - "2:" - ".section __ex_table,\"a\"\n" - " .long 22b,1b\n" - " .long 2b,1b\n" - ".previous"); - break; - case 3: - __copy_to_user_big(to, from, n, - /* fixup */ - "1: addql #2,%2\n" - "2: addql #1,%2", - /* copy */ - " movew (%1)+,%%d0\n" - "23:movesw %%d0,(%0)+\n" - "3: moveb (%1)+,%%d0\n" - "24:movesb %%d0,(%0)+\n" - "4:" - ".section __ex_table,\"a\"\n" - " .long 23b,1b\n" - " .long 3b,1b\n" - " .long 24b,2b\n" - " .long 4b,2b\n" - ".previous"); - break; + __put_user_asm(res, *(u16 *)from, (u16 *)to, w, d, 2); + return res; + case 4: + __put_user_asm(res, *(u32 *)from, (u32 *)to, l, r, 4); + return 
res; } - break; - } - return n; + + asm volatile ("\n" + " .ifndef .Lto_user\n" + " .set .Lto_user,1\n" + " .macro copy_to_user to,from,tmp\n" + " .if .Lcnt >= 4\n" + " move.l (\\from)+,\\tmp\n" + "11: moves.l \\tmp,(\\to)+\n" + "12: .set .Lcnt,.Lcnt-4\n" + " .elseif .Lcnt & 2\n" + " move.w (\\from)+,\\tmp\n" + "11: moves.w \\tmp,(\\to)+\n" + "12: .set .Lcnt,.Lcnt-2\n" + " .elseif .Lcnt & 1\n" + " move.b (\\from)+,\\tmp\n" + "11: moves.b \\tmp,(\\to)+\n" + "12: .set .Lcnt,.Lcnt-1\n" + " .else\n" + " .exitm\n" + " .endif\n" + "\n" + " .section __ex_table,\"a\"\n" + " .align 4\n" + " .long 11b,3f\n" + " .long 12b,3f\n" + " .previous\n" + " .endm\n" + " .endif\n" + "\n" + " .set .Lcnt,%c4\n" + " copy_to_user %1,%2,%3\n" + " copy_to_user %1,%2,%3\n" + " copy_to_user %1,%2,%3\n" + "2:\n" + " .section .fixup,\"ax\"\n" + " .even\n" + "3: moveq.l %4,%0\n" + " jra 2b\n" + " .previous\n" + : "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp) + : "i" (n) + : "memory"); + + return res; } -#define copy_from_user(to, from, n) \ +#define __copy_from_user(to, from, n) \ (__builtin_constant_p(n) ? \ __constant_copy_from_user(to, from, n) : \ __generic_copy_from_user(to, from, n)) -#define copy_to_user(to, from, n) \ +#define __copy_to_user(to, from, n) \ (__builtin_constant_p(n) ? \ __constant_copy_to_user(to, from, n) : \ __generic_copy_to_user(to, from, n)) -#define __copy_from_user(to, from, n) copy_from_user(to, from, n) -#define __copy_to_user(to, from, n) copy_to_user(to, from, n) +#define __copy_to_user_inatomic __copy_to_user +#define __copy_from_user_inatomic __copy_from_user -/* - * Copy a null terminated string from userspace. - */ +#define copy_from_user(to, from, n) __copy_from_user(to, from, n) +#define copy_to_user(to, from, n) __copy_to_user(to, from, n) -static inline long -strncpy_from_user(char *dst, const char __user *src, long count) -{ - long res; - if (count == 0) return count; - __asm__ __volatile__ - ("1: movesb (%2)+,%%d0\n" - "12:moveb %%d0,(%1)+\n" - " jeq 2f\n" - " subql #1,%3\n" - " jne 1b\n" - "2: subl %3,%0\n" - "3:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "4: movel %4,%0\n" - " jra 3b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,4b\n" - " .long 12b,4b\n" - ".previous" - : "=d"(res), "=a"(dst), "=a"(src), "=d"(count) - : "i"(-EFAULT), "0"(count), "1"(dst), "2"(src), "3"(count) - : "d0", "memory"); - return res; -} - -/* - * Return the size of a string (including the ending 0) - * - * Return 0 on exception, a value greater than N if too long - */ -static inline long strnlen_user(const char __user *src, long n) -{ - long res; - - res = -(unsigned long)src; - __asm__ __volatile__ - ("1:\n" - " tstl %2\n" - " jeq 3f\n" - "2: movesb (%1)+,%%d0\n" - "22:\n" - " subql #1,%2\n" - " tstb %%d0\n" - " jne 1b\n" - " jra 4f\n" - "3:\n" - " addql #1,%0\n" - "4:\n" - " addl %1,%0\n" - "5:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "6: moveq %3,%0\n" - " jra 5b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 2b,6b\n" - " .long 22b,6b\n" - ".previous" - : "=d"(res), "=a"(src), "=d"(n) - : "i"(0), "0"(res), "1"(src), "2"(n) - : "d0"); - return res; -} +long strncpy_from_user(char *dst, const char __user *src, long count); +long strnlen_user(const char __user *src, long n); +unsigned long clear_user(void __user *to, unsigned long n); #define strlen_user(str) strnlen_user(str, 32767) -/* - * Zero Userspace - */ - -static inline unsigned long -clear_user(void __user *to, unsigned long n) -{ - __asm__ __volatile__ - (" tstl 
%1\n" - " jeq 3f\n" - "1: movesl %3,(%0)+\n" - "2: subql #1,%1\n" - " jne 1b\n" - "3: movel %2,%1\n" - " bclr #1,%1\n" - " jeq 4f\n" - "24:movesw %3,(%0)+\n" - "4: bclr #0,%1\n" - " jeq 5f\n" - "25:movesb %3,(%0)+\n" - "5:\n" - ".section .fixup,\"ax\"\n" - " .even\n" - "61:addql #1,%1\n" - "6: lsll #2,%1\n" - " addl %2,%1\n" - " jra 5b\n" - "7: addql #2,%1\n" - " jra 5b\n" - "8: addql #1,%1\n" - " jra 5b\n" - ".previous\n" - ".section __ex_table,\"a\"\n" - " .align 4\n" - " .long 1b,61b\n" - " .long 2b,6b\n" - " .long 3b,61b\n" - " .long 24b,7b\n" - " .long 4b,7b\n" - " .long 25b,8b\n" - " .long 5b,8b\n" - ".previous" - : "=a"(to), "=d"(n) - : "r"(n & 3), "r"(0), "0"(to), "1"(n/4)); - return n; -} - #endif /* _M68K_UACCESS_H */ |