author     Thomas Gleixner <tglx@linutronix.de>    2021-06-23 14:02:30 +0200
committer  Borislav Petkov <bp@suse.de>            2021-06-23 20:04:58 +0200
commit     aee8c67a4faa40a8df4e79316dbfc92d123989c1 (patch)
tree       aff7d82eda101151ac605fc2cdc999b81939f307 /arch/x86
parent     0a6c2e9ec91c96bde1e8ce063180ac6e05e680f7 (diff)
x86/fpu: Return proper error codes from user access functions
When *RSTOR from user memory raises an exception, there is no way to differentiate the exception types. That's bad because it forces the slow path even when the failure was not a fault. If the operation raised e.g. #GP, then going through the slow path is pointless.

Use _ASM_EXTABLE_FAULT(), which stores the trap number, and let the exception fixup return the negated trap number as the error code. This allows the fast path to be separated out: it handles faults directly and avoids the slow path for all other exceptions.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lkml.kernel.org/r/20210623121457.601480369@linutronix.de
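For illustration, a minimal caller-side sketch (not part of this patch; the trap-vector constants mirror asm/trapnr.h, the helper name is hypothetical) of how the negated trap number lets a caller tell a page fault apart from any other exception:

#include <errno.h>	/* EFAULT == 14 */
#include <stdbool.h>

#define X86_TRAP_GP	13	/* #GP vector (mirrors asm/trapnr.h) */
#define X86_TRAP_PF	14	/* #PF vector; numerically equal to EFAULT */

/* @err is 0 on success or the negated trap number returned by *RSTOR fixup */
static bool restore_fault_is_retryable(int err)
{
	/*
	 * Only a page fault (-X86_TRAP_PF == -EFAULT) is worth retrying via
	 * the slow path; -X86_TRAP_GP and other exceptions are terminal.
	 */
	return err == -X86_TRAP_PF;
}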
Diffstat (limited to 'arch/x86')
-rw-r--r--  arch/x86/include/asm/fpu/internal.h | 19
1 file changed, 12 insertions(+), 7 deletions(-)
diff --git a/arch/x86/include/asm/fpu/internal.h b/arch/x86/include/asm/fpu/internal.h
index 528a86826f95..5a18694a89b2 100644
--- a/arch/x86/include/asm/fpu/internal.h
+++ b/arch/x86/include/asm/fpu/internal.h
@@ -88,6 +88,7 @@ static inline void fpstate_init_soft(struct swregs_state *soft) {}
#endif
extern void save_fpregs_to_fpstate(struct fpu *fpu);
+/* Returns 0 or the negated trap number, which results in -EFAULT for #PF */
#define user_insn(insn, output, input...) \
({ \
int err; \
@@ -95,14 +96,14 @@ extern void save_fpregs_to_fpstate(struct fpu *fpu);
might_fault(); \
\
asm volatile(ASM_STAC "\n" \
- "1:" #insn "\n\t" \
+ "1: " #insn "\n" \
"2: " ASM_CLAC "\n" \
".section .fixup,\"ax\"\n" \
- "3: movl $-1,%[err]\n" \
+ "3: negl %%eax\n" \
" jmp 2b\n" \
".previous\n" \
- _ASM_EXTABLE(1b, 3b) \
- : [err] "=r" (err), output \
+ _ASM_EXTABLE_FAULT(1b, 3b) \
+ : [err] "=a" (err), output \
: "0"(0), input); \
err; \
})
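For context, a sketch of how a wrapper built on user_insn() now propagates the error, assuming the fxrstor_from_user_sigframe()-style helper shape used elsewhere in this header:

static inline int fxrstor_from_user_sigframe(struct fxregs_state __user *fx)
{
	/*
	 * With the fixup above this returns 0 on success or the negated
	 * trap number: -X86_TRAP_PF (== -EFAULT) for a fault, or e.g.
	 * -X86_TRAP_GP when the instruction raises #GP.
	 */
	return user_insn(fxrstor %[fx], "=m" (*fx), [fx] "m" (*fx));
}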
@@ -196,16 +197,20 @@ static inline void fxsave(struct fxregs_state *fx)
#define XRSTOR ".byte " REX_PREFIX "0x0f,0xae,0x2f"
#define XRSTORS ".byte " REX_PREFIX "0x0f,0xc7,0x1f"
+/*
+ * After this @err contains 0 on success or the negated trap number when
+ * the operation raises an exception. For faults this results in -EFAULT.
+ */
#define XSTATE_OP(op, st, lmask, hmask, err) \
asm volatile("1:" op "\n\t" \
"xor %[err], %[err]\n" \
"2:\n\t" \
".pushsection .fixup,\"ax\"\n\t" \
- "3: movl $-2,%[err]\n\t" \
+ "3: negl %%eax\n\t" \
"jmp 2b\n\t" \
".popsection\n\t" \
- _ASM_EXTABLE(1b, 3b) \
- : [err] "=r" (err) \
+ _ASM_EXTABLE_FAULT(1b, 3b) \
+ : [err] "=a" (err) \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")