Merge tag 'v4.7-rc6' into x86/mm, to merge fixes before applying new changes
author Ingo Molnar <mingo@kernel.org>
Fri, 8 Jul 2016 09:51:28 +0000 (11:51 +0200)
committer Ingo Molnar <mingo@kernel.org>
Fri, 8 Jul 2016 09:51:28 +0000 (11:51 +0200)
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/include/asm/uaccess.h

@@@ -5,6 -5,7 +5,7 @@@
   */
  #include <linux/errno.h>
  #include <linux/compiler.h>
+ #include <linux/kasan-checks.h>
  #include <linux/thread_info.h>
  #include <linux/string.h>
  #include <asm/asm.h>
@@@ -118,7 -119,7 +119,7 @@@ struct exception_table_entry 
  
  extern int fixup_exception(struct pt_regs *regs, int trapnr);
  extern bool ex_has_fault_handler(unsigned long ip);
- extern int early_fixup_exception(unsigned long *ip);
+ extern void early_fixup_exception(struct pt_regs *regs, int trapnr);
  
  /*
   * These are the main single-value transfer routines.  They automatically
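
For context, a rough sketch of the new early_fixup_exception() calling convention (hypothetical caller name, not part of this diff; the real early IDT entry path is in assembly). The point of the signature change is that the whole register frame and the vector number now reach the fixup logic, which either patches the faulting IP via the exception table or handles the unrecoverable case itself, so the caller no longer inspects a return value:

	/* Hypothetical illustration only. */
	static void early_exception_sketch(struct pt_regs *regs, int trapnr)
	{
		/*
		 * Old: early_fixup_exception(&regs->ip) returned a
		 * fixed-up/not-fixed-up flag and the caller decided what
		 * to do on failure.  New: pass the full frame plus the
		 * trap number and let the function deal with both cases.
		 */
		early_fixup_exception(regs, trapnr);
	}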
@@@ -341,26 -342,7 +342,26 @@@ do {                                                                     
  } while (0)
  
  #ifdef CONFIG_X86_32
 -#define __get_user_asm_u64(x, ptr, retval, errret)    (x) = __get_user_bad()
 +#define __get_user_asm_u64(x, ptr, retval, errret)                    \
 +({                                                                    \
 +      __typeof__(ptr) __ptr = (ptr);                                  \
 +      asm volatile(ASM_STAC "\n"                                      \
 +                   "1:        movl %2,%%eax\n"                        \
 +                   "2:        movl %3,%%edx\n"                        \
 +                   "3: " ASM_CLAC "\n"                                \
 +                   ".section .fixup,\"ax\"\n"                         \
 +                   "4:        mov %4,%0\n"                            \
 +                   "  xorl %%eax,%%eax\n"                             \
 +                   "  xorl %%edx,%%edx\n"                             \
 +                   "  jmp 3b\n"                                       \
 +                   ".previous\n"                                      \
 +                   _ASM_EXTABLE(1b, 4b)                               \
 +                   _ASM_EXTABLE(2b, 4b)                               \
 +                   : "=r" (retval), "=A"(x)                           \
 +                   : "m" (__m(__ptr)), "m" __m(((u32 *)(__ptr)) + 1), \
 +                     "i" (errret), "0" (retval));                     \
 +})
 +
  #define __get_user_asm_ex_u64(x, ptr)                 (x) = __get_user_bad()
  #else
  #define __get_user_asm_u64(x, ptr, retval, errret) \
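
As an illustration of what the new 32-bit __get_user_asm_u64() above does, here is a rough C-level equivalent (hypothetical helper name; the real macro has to stay in inline asm so that both loads sit under ASM_STAC/ASM_CLAC and are covered by the two exception-table entries):

	static inline int __get_user_u64_sketch(u64 *dst, const u64 __user *uptr)
	{
		const u32 __user *p = (const u32 __user *)uptr;
		u32 lo, hi;

		/* Two 32-bit loads, low word first (little-endian), as in the asm. */
		if (__get_user(lo, p) || __get_user(hi, p + 1)) {
			*dst = 0;		/* the asm fault path zeroes eax and edx */
			return -EFAULT;
		}

		*dst = ((u64)hi << 32) | lo;	/* the asm returns this in edx:eax via "=A" */
		return 0;
	}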
@@@ -447,7 -429,7 +448,7 @@@ do {                                                                       
  #define __get_user_nocheck(x, ptr, size)                              \
  ({                                                                    \
        int __gu_err;                                                   \
 -      unsigned long __gu_val;                                         \
 +      __inttype(*(ptr)) __gu_val;                                     \
        __uaccess_begin();                                              \
        __get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT);    \
        __uaccess_end();                                                \
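
Taken together, the two changes in this file let a 32-bit kernel fetch a 64-bit value with __get_user(): the per-size asm now exists, and __get_user_nocheck() keeps its temporary in a type as wide as *(ptr) instead of a 32-bit unsigned long. A hypothetical caller, assuming the pointer was already validated with access_ok():

	static int read_user_u64(const u64 __user *uptr, u64 *out)
	{
		u64 v;

		if (__get_user(v, uptr))	/* was a __get_user_bad() link error on CONFIG_X86_32 */
			return -EFAULT;

		*out = v;
		return 0;
	}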
@@@ -740,6 -722,8 +741,8 @@@ copy_from_user(void *to, const void __u
  
        might_fault();
  
+       kasan_check_write(to, n);
+
        /*
         * While we would like to have the compiler do the checking for us
         * even in the non-constant size case, any false positives there are
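
The kasan_check_write() call instruments the kernel-side destination before the copy, so KASAN can report a too-long copy into a heap or stack object even when the copy itself would succeed. A hypothetical caller that benefits (not part of this diff):

	static int fill_from_user(void *dst, const void __user *src, size_t len)
	{
		/*
		 * 'dst' has no compile-time size here, so the existing
		 * __compiletime_object_size() check cannot help.  On a
		 * CONFIG_KASAN kernel, kasan_check_write(dst, len) now
		 * reports the overflow if 'len' exceeds the allocation
		 * behind 'dst'.
		 */
		if (copy_from_user(dst, src, len))
			return -EFAULT;

		return 0;
	}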
@@@ -773,6 -757,8 +776,8 @@@ copy_to_user(void __user *to, const voi
  {
        int sz = __compiletime_object_size(from);
  
+       kasan_check_read(from, n);
+
        might_fault();
  
        /* See the comment in copy_from_user() above. */
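
The read side is symmetric: kasan_check_read() validates the kernel source buffer before its contents are exposed to userspace, catching out-of-bounds info leaks. A minimal hypothetical caller:

	static int push_to_user(void __user *dst, const void *src, size_t len)
	{
		/* kasan_check_read(src, len) runs inside copy_to_user() before the copy. */
		if (copy_to_user(dst, src, len))
			return -EFAULT;

		return 0;
	}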