Two register moves with exception handling are faster to inline than a
call to __copy_user_ll(). We already apply this conversion for get_user()
calls, so for symmetry also apply the optimisation to constant-sized
copy_from_user() calls of 6 and 8 bytes.
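
For illustration only, a sketch of the kind of caller the new cases
target (the struct and function names below are hypothetical): with a
compile-time-constant size of 8, copy_from_user() now resolves to the
inlined pair of 32-bit loads plus their exception-table entries instead
of a call to __copy_user_ll():

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    /* Hypothetical 8-byte ioctl argument block */
    struct foo_args {
            u32 handle;
            u32 flags;
    };      /* sizeof(struct foo_args) == 8 -> hits the new case 8 */

    static int foo_copy_args(struct foo_args *args, const void __user *uptr)
    {
            /*
             * sizeof(*args) is a compile-time constant, so on x86-32
             * raw_copy_from_user() inlines two register moves with
             * exception handling rather than calling __copy_user_ll().
             */
            if (copy_from_user(args, uptr, sizeof(*args)))
                    return -EFAULT;
            return 0;
    }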

Signed-off-by: Chris Wilson <ch...@chris-wilson.co.uk>
Cc: Thomas Gleixner <t...@linutronix.de>
Cc: Ingo Molnar <mi...@redhat.com>
Cc: "H. Peter Anvin" <h...@zytor.com>
---
 arch/x86/include/asm/uaccess_32.h | 25 +++++++++++++++++++++----
 1 file changed, 21 insertions(+), 4 deletions(-)

diff --git a/arch/x86/include/asm/uaccess_32.h b/arch/x86/include/asm/uaccess_32.h
index aeda9bb8af50..44d17d1ab07c 100644
--- a/arch/x86/include/asm/uaccess_32.h
+++ b/arch/x86/include/asm/uaccess_32.h
@@ -23,30 +23,47 @@ static __always_inline unsigned long
 raw_copy_from_user(void *to, const void __user *from, unsigned long n)
 {
        if (__builtin_constant_p(n)) {
-               unsigned long ret;
+               unsigned long ret = 0;
 
                switch (n) {
                case 1:
-                       ret = 0;
                        __uaccess_begin();
                        __get_user_asm_nozero(*(u8 *)to, from, ret,
                                              "b", "b", "=q", 1);
                        __uaccess_end();
                        return ret;
                case 2:
-                       ret = 0;
                        __uaccess_begin();
                        __get_user_asm_nozero(*(u16 *)to, from, ret,
                                              "w", "w", "=r", 2);
                        __uaccess_end();
                        return ret;
                case 4:
-                       ret = 0;
                        __uaccess_begin();
                        __get_user_asm_nozero(*(u32 *)to, from, ret,
                                              "l", "k", "=r", 4);
                        __uaccess_end();
                        return ret;
+               case 6:
+                       __uaccess_begin();
+                       __get_user_asm_nozero(*(u32 *)to, from, ret,
+                                             "l", "k", "=r", 6);
+                       if (likely(!ret))
+                               __get_user_asm_nozero(*(u16 *)(4 + (char *)to),
+                                                     (u16 __user *)(4 + (char __user *)from),
+                                                     ret, "w", "w", "=r", 2);
+                       __uaccess_end();
+                       return ret;
+               case 8:
+                       __uaccess_begin();
+                       __get_user_asm_nozero(*(u32 *)to, from, ret,
+                                             "l", "k", "=r", 8);
+                       if (likely(!ret))
+                               __get_user_asm_nozero(*(u32 *)(4 + (char *)to),
+                                                     (u32 __user *)(4 + (char __user *)from),
+                                                     ret, "l", "k", "=r", 4);
+                       __uaccess_end();
+                       return ret;
                }
        }
        return __copy_user_ll(to, (__force const void *)from, n);
-- 
2.11.0
