
saner calling conventions for csum_and_copy_..._user()

All callers of these primitives will
* discard anything we might've copied in case of error
* ignore the csum value in case of error
* always pass 0xffffffff as the initial sum, so the
  resulting csum value (in case of success, that is) will never be 0; a
  standalone demonstration of that property follows below.
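
The "never 0" part is easy to check outside the kernel.  The sketch below is
plain userspace C, not kernel code: ocsum() is a simplified stand-in for
csum_partial(), written only to show that an accumulation seeded with
0xffffffff stays nonzero even for data that would otherwise sum to 0.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/*
 * Simplified stand-in for csum_partial(): 32-bit ones'-complement
 * accumulation over 16-bit words with end-around carry.  Illustrative
 * only; the real kernel helpers are arch-specific.
 */
static uint32_t ocsum(const uint8_t *buf, size_t len, uint32_t sum)
{
	while (len >= 2) {
		uint16_t w;

		memcpy(&w, buf, sizeof(w));
		sum += w;
		if (sum < w)		/* end-around carry */
			sum++;
		buf += 2;
		len -= 2;
	}
	if (len) {			/* trailing odd byte */
		sum += *buf;
		if (sum < *buf)
			sum++;
	}
	return sum;
}

int main(void)
{
	uint8_t zeros[32] = { 0 };

	/* seeded with 0, an all-zero buffer accumulates to 0 ... */
	printf("seed 0x00000000 -> 0x%08x\n",
	       (unsigned)ocsum(zeros, sizeof(zeros), 0));
	/* ... seeded with ~0U it accumulates to 0xffffffff, so a
	 * successful call can never return 0 */
	printf("seed 0xffffffff -> 0x%08x\n",
	       (unsigned)ocsum(zeros, sizeof(zeros), ~0U));
	return 0;
}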

That suggests the following calling conventions (a caller-side sketch follows the list):
* don't pass err_ptr - just return 0 on error.
* don't bother with zeroing destination, etc. in case of error
* don't pass the initial sum - just use 0xffffffff.
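
Concretely, callers stop threading an err_ptr through the call and instead
treat a zero return as "the copy faulted", mirroring the lib/iov_iter.c hunk
further down.  The userspace sketch below mocks the primitive purely to show
that calling pattern; the csum_and_copy_from_user() stub here is illustrative,
not the kernel implementation.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint32_t __wsum;

/*
 * Userspace stub following the new convention: return 0 on a (simulated)
 * fault, otherwise a sum accumulated from an initial value of ~0U so that
 * success never yields 0.  Odd trailing bytes are ignored in this sketch.
 */
static __wsum csum_and_copy_from_user(const void *src, void *dst, int len)
{
	uint32_t sum = ~0U;
	const uint8_t *p = dst;
	int i;

	if (!src)			/* stand-in for a faulting user pointer */
		return 0;

	memcpy(dst, src, len);
	for (i = 0; i + 1 < len; i += 2) {
		uint16_t w;

		memcpy(&w, p + i, sizeof(w));
		sum += w;
		if (sum < w)		/* end-around carry */
			sum++;
	}
	return (__wsum)sum;
}

int main(void)
{
	const char src[8] = "abcdefg";
	char dst[8];
	__wsum csum;

	/*
	 * Old convention:  int err = 0;
	 *                  csum = csum_and_copy_from_user(src, dst, len, ~0U, &err);
	 *                  if (err) ...
	 * New convention:  a zero return means the copy faulted.
	 */
	csum = csum_and_copy_from_user(src, dst, sizeof(dst));
	if (!csum)
		return 1;
	printf("csum = 0x%08x\n", (unsigned)csum);
	return 0;
}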

This commit does the minimal conversion of the csum_and_copy_...() instances;
the changes to the actual asm code behind them are done later in the series.
Note that this asm code is often shared with csum_partial_copy_nocheck();
the difference is that csum_partial_copy_nocheck() passes 0 for initial
sum while csum_and_copy_..._user() pass 0xffffffff. Fortunately, we are
free to pass 0xffffffff in all cases and subsequent patches will use that
freedom without any special comments.
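
The "free to pass 0xffffffff" part relies on 0 and 0xffffffff being congruent
in ones'-complement arithmetic, so the folded 16-bit result comes out the same
either way.  Another standalone userspace check, again with simplified
stand-ins (ocsum()/ocfold() approximate csum_partial()/csum_fold() and are not
the kernel code):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* simplified 32-bit ones'-complement accumulation (stand-in for csum_partial()) */
static uint32_t ocsum(const uint8_t *buf, size_t len, uint32_t sum)
{
	while (len >= 2) {
		uint16_t w;

		memcpy(&w, buf, sizeof(w));
		sum += w;
		if (sum < w)		/* end-around carry */
			sum++;
		buf += 2;
		len -= 2;
	}
	return sum;
}

/* fold the 32-bit accumulator to the final 16-bit value (stand-in for csum_fold()) */
static uint16_t ocfold(uint32_t sum)
{
	sum = (sum & 0xffff) + (sum >> 16);
	sum = (sum & 0xffff) + (sum >> 16);
	return (uint16_t)~sum;
}

int main(void)
{
	uint8_t data[32];
	size_t i;

	for (i = 0; i < sizeof(data); i++)
		data[i] = (uint8_t)(i * 37 + 11);	/* arbitrary nonzero payload */

	/* the folded checksum is identical whichever initial sum is used */
	printf("seed 0x00000000 -> fold 0x%04x\n",
	       (unsigned)ocfold(ocsum(data, sizeof(data), 0)));
	printf("seed 0xffffffff -> fold 0x%04x\n",
	       (unsigned)ocfold(ocsum(data, sizeof(data), ~0U)));
	return 0;
}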

A part that could be split off: parisc and uml/i386 claimed to have
csum_and_copy_to_user() instances of their own, but those were identical
to the generic one, so we simply drop them. Not sure if it's worth
a separate commit...

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>

Al Viro c693cc46 99a2c96d

+183 -287
+1 -1
arch/alpha/include/asm/checksum.h
··· 43 43 */ 44 44 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 45 45 #define _HAVE_ARCH_CSUM_AND_COPY 46 - __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *errp); 46 + __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len); 47 47 48 48 __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len); 49 49
+11 -14
arch/alpha/lib/csum_partial_copy.c
··· 325 325 } 326 326 327 327 __wsum 328 - csum_and_copy_from_user(const void __user *src, void *dst, int len, 329 - __wsum sum, int *errp) 328 + csum_and_copy_from_user(const void __user *src, void *dst, int len) 330 329 { 331 - unsigned long checksum = (__force u32) sum; 330 + unsigned long checksum = ~0U; 332 331 unsigned long soff = 7 & (unsigned long) src; 333 332 unsigned long doff = 7 & (unsigned long) dst; 333 + int err = 0; 334 334 335 335 if (len) { 336 - if (!access_ok(src, len)) { 337 - if (errp) *errp = -EFAULT; 338 - memset(dst, 0, len); 339 - return sum; 340 - } 336 + if (!access_ok(src, len)) 337 + return 0; 341 338 if (!doff) { 342 339 if (!soff) 343 340 checksum = csum_partial_cfu_aligned( 344 341 (const unsigned long __user *) src, 345 342 (unsigned long *) dst, 346 - len-8, checksum, errp); 343 + len-8, checksum, &err); 347 344 else 348 345 checksum = csum_partial_cfu_dest_aligned( 349 346 (const unsigned long __user *) src, 350 347 (unsigned long *) dst, 351 - soff, len-8, checksum, errp); 348 + soff, len-8, checksum, &err); 352 349 } else { 353 350 unsigned long partial_dest; 354 351 ldq_u(partial_dest, dst); ··· 354 357 (const unsigned long __user *) src, 355 358 (unsigned long *) dst, 356 359 doff, len-8, checksum, 357 - partial_dest, errp); 360 + partial_dest, &err); 358 361 else 359 362 checksum = csum_partial_cfu_unaligned( 360 363 (const unsigned long __user *) src, 361 364 (unsigned long *) dst, 362 365 soff, doff, len-8, checksum, 363 - partial_dest, errp); 366 + partial_dest, &err); 364 367 } 365 - checksum = from64to16 (checksum); 368 + checksum = err ? 0 : from64to16 (checksum); 366 369 } 367 370 return (__force __wsum)checksum; 368 371 } ··· 375 378 mm_segment_t oldfs = get_fs(); 376 379 set_fs(KERNEL_DS); 377 380 checksum = csum_and_copy_from_user((__force const void __user *)src, 378 - dst, len, 0, NULL); 381 + dst, len); 379 382 set_fs(oldfs); 380 383 return checksum; 381 384 }
+6 -7
arch/arm/include/asm/checksum.h
··· 43 43 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 44 44 #define _HAVE_ARCH_CSUM_AND_COPY 45 45 static inline 46 - __wsum csum_and_copy_from_user (const void __user *src, void *dst, 47 - int len, __wsum sum, int *err_ptr) 46 + __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len) 48 47 { 49 - if (access_ok(src, len)) 50 - return csum_partial_copy_from_user(src, dst, len, sum, err_ptr); 48 + int err = 0; 51 49 52 - if (len) 53 - *err_ptr = -EFAULT; 50 + if (!access_ok(src, len)) 51 + return 0; 54 52 55 - return sum; 53 + sum = csum_partial_copy_from_user(src, dst, len, ~0U, &err); 54 + return err ? 0 : sum; 56 55 } 57 56 58 57 /*
+1 -2
arch/m68k/include/asm/checksum.h
··· 34 34 #define _HAVE_ARCH_CSUM_AND_COPY 35 35 extern __wsum csum_and_copy_from_user(const void __user *src, 36 36 void *dst, 37 - int len, __wsum sum, 38 - int *csum_err); 37 + int len); 39 38 40 39 extern __wsum csum_partial_copy_nocheck(const void *src, 41 40 void *dst, int len);
+3 -5
arch/m68k/lib/checksum.c
··· 129 129 */ 130 130 131 131 __wsum 132 - csum_and_copy_from_user(const void __user *src, void *dst, 133 - int len, __wsum sum, int *csum_err) 132 + csum_and_copy_from_user(const void __user *src, void *dst, int len) 134 133 { 135 134 /* 136 135 * GCC doesn't like more than 10 operands for the asm ··· 137 138 * code. 138 139 */ 139 140 unsigned long tmp1, tmp2; 141 + __wsum sum = ~0U; 140 142 141 143 __asm__("movel %2,%4\n\t" 142 144 "btst #1,%4\n\t" /* Check alignment */ ··· 311 311 : "0" (sum), "1" (len), "2" (src), "3" (dst) 312 312 ); 313 313 314 - *csum_err = tmp2; 315 - 316 - return(sum); 314 + return tmp2 ? 0 : sum; 317 315 } 318 316 319 317 EXPORT_SYMBOL(csum_and_copy_from_user);
+22 -24
arch/mips/include/asm/checksum.h
··· 60 60 61 61 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 62 62 static inline 63 - __wsum csum_and_copy_from_user(const void __user *src, void *dst, 64 - int len, __wsum sum, int *err_ptr) 63 + __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len) 65 64 { 66 - if (access_ok(src, len)) 67 - return csum_partial_copy_from_user(src, dst, len, sum, 68 - err_ptr); 69 - if (len) 70 - *err_ptr = -EFAULT; 65 + __wsum sum = ~0U; 66 + int err = 0; 71 67 72 - return sum; 68 + if (!access_ok(src, len)) 69 + return 0; 70 + sum = csum_partial_copy_from_user(src, dst, len, sum, &err); 71 + return err ? 0 : sum; 73 72 } 74 73 75 74 /* ··· 76 77 */ 77 78 #define HAVE_CSUM_COPY_USER 78 79 static inline 79 - __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len, 80 - __wsum sum, int *err_ptr) 80 + __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len) 81 81 { 82 - might_fault(); 83 - if (access_ok(dst, len)) { 84 - if (uaccess_kernel()) 85 - return __csum_partial_copy_kernel(src, 86 - (__force void *)dst, 87 - len, sum, err_ptr); 88 - else 89 - return __csum_partial_copy_to_user(src, 90 - (__force void *)dst, 91 - len, sum, err_ptr); 92 - } 93 - if (len) 94 - *err_ptr = -EFAULT; 82 + int err = 0; 83 + __wsum sum = ~0U; 95 84 96 - return (__force __wsum)-1; /* invalid checksum */ 85 + might_fault(); 86 + if (!access_ok(dst, len)) 87 + return 0; 88 + if (uaccess_kernel()) 89 + sum = __csum_partial_copy_kernel(src, 90 + (__force void *)dst, 91 + len, sum, &err); 92 + else 93 + sum = __csum_partial_copy_to_user(src, 94 + (__force void *)dst, 95 + len, sum, &err); 96 + return err ? 0 : sum; 97 97 } 98 98 99 99 /*
-20
arch/parisc/include/asm/checksum.h
··· 173 173 return csum_fold(sum); 174 174 } 175 175 176 - /* 177 - * Copy and checksum to user 178 - */ 179 - #define HAVE_CSUM_COPY_USER 180 - static __inline__ __wsum csum_and_copy_to_user(const void *src, 181 - void __user *dst, 182 - int len, __wsum sum, 183 - int *err_ptr) 184 - { 185 - /* code stolen from include/asm-mips64 */ 186 - sum = csum_partial(src, len, sum); 187 - 188 - if (copy_to_user(dst, src, len)) { 189 - *err_ptr = -EFAULT; 190 - return (__force __wsum)-1; 191 - } 192 - 193 - return sum; 194 - } 195 - 196 176 #endif 197 177
+2 -2
arch/powerpc/include/asm/checksum.h
··· 24 24 25 25 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 26 26 extern __wsum csum_and_copy_from_user(const void __user *src, void *dst, 27 - int len, __wsum sum, int *err_ptr); 27 + int len); 28 28 #define HAVE_CSUM_COPY_USER 29 29 extern __wsum csum_and_copy_to_user(const void *src, void __user *dst, 30 - int len, __wsum sum, int *err_ptr); 30 + int len); 31 31 32 32 #define _HAVE_ARCH_CSUM_AND_COPY 33 33 #define csum_partial_copy_nocheck(src, dst, len) \
+22 -48
arch/powerpc/lib/checksum_wrappers.c
··· 12 12 #include <linux/uaccess.h> 13 13 14 14 __wsum csum_and_copy_from_user(const void __user *src, void *dst, 15 - int len, __wsum sum, int *err_ptr) 15 + int len) 16 16 { 17 17 unsigned int csum; 18 + int err = 0; 18 19 19 20 might_sleep(); 21 + 22 + if (unlikely(!access_ok(src, len))) 23 + return 0; 24 + 20 25 allow_read_from_user(src, len); 21 26 22 - *err_ptr = 0; 23 - 24 - if (!len) { 25 - csum = 0; 26 - goto out; 27 - } 28 - 29 - if (unlikely((len < 0) || !access_ok(src, len))) { 30 - *err_ptr = -EFAULT; 31 - csum = (__force unsigned int)sum; 32 - goto out; 33 - } 34 - 35 27 csum = csum_partial_copy_generic((void __force *)src, dst, 36 - len, sum, err_ptr, NULL); 28 + len, ~0U, &err, NULL); 37 29 38 - if (unlikely(*err_ptr)) { 30 + if (unlikely(err)) { 39 31 int missing = __copy_from_user(dst, src, len); 40 32 41 - if (missing) { 42 - memset(dst + len - missing, 0, missing); 43 - *err_ptr = -EFAULT; 44 - } else { 45 - *err_ptr = 0; 46 - } 47 - 48 - csum = csum_partial(dst, len, sum); 33 + if (missing) 34 + csum = 0; 35 + else 36 + csum = csum_partial(dst, len, ~0U); 49 37 } 50 38 51 - out: 52 39 prevent_read_from_user(src, len); 53 40 return (__force __wsum)csum; 54 41 } 55 42 EXPORT_SYMBOL(csum_and_copy_from_user); 56 43 57 - __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len, 58 - __wsum sum, int *err_ptr) 44 + __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len) 59 45 { 60 46 unsigned int csum; 47 + int err = 0; 61 48 62 49 might_sleep(); 50 + if (unlikely(!access_ok(dst, len))) 51 + return 0; 52 + 63 53 allow_write_to_user(dst, len); 64 54 65 - *err_ptr = 0; 66 - 67 - if (!len) { 68 - csum = 0; 69 - goto out; 70 - } 71 - 72 - if (unlikely((len < 0) || !access_ok(dst, len))) { 73 - *err_ptr = -EFAULT; 74 - csum = -1; /* invalid checksum */ 75 - goto out; 76 - } 77 - 78 55 csum = csum_partial_copy_generic(src, (void __force *)dst, 79 - len, sum, NULL, err_ptr); 56 + len, ~0U, NULL, &err); 80 57 81 - if (unlikely(*err_ptr)) { 82 - csum = csum_partial(src, len, sum); 58 + if (unlikely(err)) { 59 + csum = csum_partial(src, len, ~0U); 83 60 84 - if (copy_to_user(dst, src, len)) { 85 - *err_ptr = -EFAULT; 86 - csum = -1; /* invalid checksum */ 87 - } 61 + if (copy_to_user(dst, src, len)) 62 + csum = 0; 88 63 } 89 64 90 - out: 91 65 prevent_write_to_user(dst, len); 92 66 return (__force __wsum)csum; 93 67 }
+17 -17
arch/sh/include/asm/checksum_32.h
··· 50 50 51 51 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 52 52 static inline 53 - __wsum csum_and_copy_from_user(const void __user *src, void *dst, 54 - int len, __wsum sum, int *err_ptr) 53 + __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len) 55 54 { 56 - if (access_ok(src, len)) 57 - return csum_partial_copy_generic((__force const void *)src, dst, 58 - len, sum, err_ptr, NULL); 59 - if (len) 60 - *err_ptr = -EFAULT; 61 - return sum; 55 + int err = 0; 56 + __wsum sum = ~0U; 57 + 58 + if (!access_ok(src, len)) 59 + return 0; 60 + sum = csum_partial_copy_generic((__force const void *)src, dst, 61 + len, sum, &err, NULL); 62 + return err ? 0 : sum; 62 63 } 63 64 64 65 /* ··· 200 199 #define HAVE_CSUM_COPY_USER 201 200 static inline __wsum csum_and_copy_to_user(const void *src, 202 201 void __user *dst, 203 - int len, __wsum sum, 204 - int *err_ptr) 202 + int len) 205 203 { 206 - if (access_ok(dst, len)) 207 - return csum_partial_copy_generic((__force const void *)src, 208 - dst, len, sum, NULL, err_ptr); 204 + int err = 0; 205 + __wsum sum = ~0U; 209 206 210 - if (len) 211 - *err_ptr = -EFAULT; 212 - 213 - return (__force __wsum)-1; /* invalid checksum */ 207 + if (!access_ok(dst, len)) 208 + return 0; 209 + sum = csum_partial_copy_generic((__force const void *)src, 210 + dst, len, sum, NULL, &err); 211 + return err ? 0 : sum; 214 212 } 215 213 #endif /* __ASM_SH_CHECKSUM_H */
+30 -35
arch/sparc/include/asm/checksum_32.h
··· 60 60 } 61 61 62 62 static inline __wsum 63 - csum_and_copy_from_user(const void __user *src, void *dst, int len, 64 - __wsum sum, int *err) 63 + csum_and_copy_from_user(const void __user *src, void *dst, int len) 65 64 { 66 65 register unsigned long ret asm("o0") = (unsigned long)src; 67 66 register char *d asm("o1") = dst; 68 67 register int l asm("g1") = len; 69 - register __wsum s asm("g7") = sum; 68 + register __wsum s asm("g7") = ~0U; 69 + int err = 0; 70 70 71 - if (unlikely(!access_ok(src, len))) { 72 - if (len) 73 - *err = -EFAULT; 74 - return sum; 75 - } 71 + if (unlikely(!access_ok(src, len))) 72 + return 0; 76 73 77 74 __asm__ __volatile__ ( 78 75 ".section __ex_table,#alloc\n\t" ··· 80 83 "call __csum_partial_copy_sparc_generic\n\t" 81 84 " st %8, [%%sp + 64]\n" 82 85 : "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s) 83 - : "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err) 86 + : "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err) 84 87 : "o2", "o3", "o4", "o5", "o7", "g2", "g3", "g4", "g5", 85 88 "cc", "memory"); 86 - return (__force __wsum)ret; 89 + return err ? 0 : (__force __wsum)ret; 87 90 } 88 91 89 92 #define HAVE_CSUM_COPY_USER 90 93 91 94 static inline __wsum 92 - csum_and_copy_to_user(const void *src, void __user *dst, int len, 93 - __wsum sum, int *err) 95 + csum_and_copy_to_user(const void *src, void __user *dst, int len) 94 96 { 95 - if (!access_ok(dst, len)) { 96 - *err = -EFAULT; 97 - return sum; 98 - } else { 99 - register unsigned long ret asm("o0") = (unsigned long)src; 100 - register char __user *d asm("o1") = dst; 101 - register int l asm("g1") = len; 102 - register __wsum s asm("g7") = sum; 97 + register unsigned long ret asm("o0") = (unsigned long)src; 98 + register char __user *d asm("o1") = dst; 99 + register int l asm("g1") = len; 100 + register __wsum s asm("g7") = ~0U; 101 + int err = 0; 103 102 104 - __asm__ __volatile__ ( 105 - ".section __ex_table,#alloc\n\t" 106 - ".align 4\n\t" 107 - ".word 1f,1\n\t" 108 - ".previous\n" 109 - "1:\n\t" 110 - "call __csum_partial_copy_sparc_generic\n\t" 111 - " st %8, [%%sp + 64]\n" 112 - : "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s) 113 - : "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err) 114 - : "o2", "o3", "o4", "o5", "o7", 115 - "g2", "g3", "g4", "g5", 116 - "cc", "memory"); 117 - return (__force __wsum)ret; 118 - } 103 + if (!access_ok(dst, len)) 104 + return 0; 105 + 106 + __asm__ __volatile__ ( 107 + ".section __ex_table,#alloc\n\t" 108 + ".align 4\n\t" 109 + ".word 1f,1\n\t" 110 + ".previous\n" 111 + "1:\n\t" 112 + "call __csum_partial_copy_sparc_generic\n\t" 113 + " st %8, [%%sp + 64]\n" 114 + : "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s) 115 + : "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err) 116 + : "o2", "o3", "o4", "o5", "o7", 117 + "g2", "g3", "g4", "g5", 118 + "cc", "memory"); 119 + return err ? 0 : (__force __wsum)ret; 119 120 } 120 121 121 122 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
+6 -8
arch/sparc/include/asm/checksum_64.h
··· 51 51 52 52 static inline __wsum 53 53 csum_and_copy_from_user(const void __user *src, 54 - void *dst, int len, 55 - __wsum sum, int *err) 54 + void *dst, int len) 56 55 { 57 - long ret = __csum_partial_copy_from_user(src, dst, len, sum); 56 + long ret = __csum_partial_copy_from_user(src, dst, len, ~0U); 58 57 if (ret < 0) 59 - *err = -EFAULT; 58 + return 0; 60 59 return (__force __wsum) ret; 61 60 } 62 61 ··· 69 70 70 71 static inline __wsum 71 72 csum_and_copy_to_user(const void *src, 72 - void __user *dst, int len, 73 - __wsum sum, int *err) 73 + void __user *dst, int len) 74 74 { 75 - long ret = __csum_partial_copy_to_user(src, dst, len, sum); 75 + long ret = __csum_partial_copy_to_user(src, dst, len, ~0U); 76 76 if (ret < 0) 77 - *err = -EFAULT; 77 + return 0; 78 78 return (__force __wsum) ret; 79 79 } 80 80
+14 -21
arch/x86/include/asm/checksum_32.h
··· 44 44 } 45 45 46 46 static inline __wsum csum_and_copy_from_user(const void __user *src, 47 - void *dst, int len, 48 - __wsum sum, int *err_ptr) 47 + void *dst, int len) 49 48 { 50 49 __wsum ret; 50 + int err = 0; 51 51 52 52 might_sleep(); 53 - if (!user_access_begin(src, len)) { 54 - if (len) 55 - *err_ptr = -EFAULT; 56 - return sum; 57 - } 53 + if (!user_access_begin(src, len)) 54 + return 0; 58 55 ret = csum_partial_copy_generic((__force void *)src, dst, 59 - len, sum, err_ptr, NULL); 56 + len, ~0U, &err, NULL); 60 57 user_access_end(); 61 58 62 - return ret; 59 + return err ? 0 : ret; 63 60 } 64 61 65 62 /* ··· 174 177 */ 175 178 static inline __wsum csum_and_copy_to_user(const void *src, 176 179 void __user *dst, 177 - int len, __wsum sum, 178 - int *err_ptr) 180 + int len) 179 181 { 180 182 __wsum ret; 183 + int err = 0; 181 184 182 185 might_sleep(); 183 - if (user_access_begin(dst, len)) { 184 - ret = csum_partial_copy_generic(src, (__force void *)dst, 185 - len, sum, NULL, err_ptr); 186 - user_access_end(); 187 - return ret; 188 - } 186 + if (!user_access_begin(dst, len)) 187 + return 0; 189 188 190 - if (len) 191 - *err_ptr = -EFAULT; 192 - 193 - return (__force __wsum)-1; /* invalid checksum */ 189 + ret = csum_partial_copy_generic(src, (__force void *)dst, 190 + len, ~0U, NULL, &err); 191 + user_access_end(); 192 + return err ? 0 : ret; 194 193 } 195 194 196 195 #endif /* _ASM_X86_CHECKSUM_32_H */
+2 -4
arch/x86/include/asm/checksum_64.h
··· 135 135 int *src_err_ptr, int *dst_err_ptr); 136 136 137 137 138 - extern __wsum csum_and_copy_from_user(const void __user *src, void *dst, 139 - int len, __wsum isum, int *errp); 140 - extern __wsum csum_and_copy_to_user(const void *src, void __user *dst, 141 - int len, __wsum isum, int *errp); 138 + extern __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len); 139 + extern __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len); 142 140 extern __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len); 143 141 144 142 /**
+16 -22
arch/x86/lib/csum-wrappers_64.c
··· 22 22 */ 23 23 __wsum 24 24 csum_and_copy_from_user(const void __user *src, void *dst, 25 - int len, __wsum isum, int *errp) 25 + int len) 26 26 { 27 + int err = 0; 28 + __wsum isum = ~0U; 29 + 27 30 might_sleep(); 28 - *errp = 0; 29 31 30 32 if (!user_access_begin(src, len)) 31 - goto out_err; 33 + return 0; 32 34 33 35 /* 34 36 * Why 6, not 7? To handle odd addresses aligned we ··· 55 53 } 56 54 } 57 55 isum = csum_partial_copy_generic((__force const void *)src, 58 - dst, len, isum, errp, NULL); 56 + dst, len, isum, &err, NULL); 59 57 user_access_end(); 60 - if (unlikely(*errp)) 61 - goto out_err; 62 - 58 + if (unlikely(err)) 59 + isum = 0; 63 60 return isum; 64 61 65 62 out: 66 63 user_access_end(); 67 - out_err: 68 - *errp = -EFAULT; 69 - memset(dst, 0, len); 70 - 71 - return isum; 64 + return 0; 72 65 } 73 66 EXPORT_SYMBOL(csum_and_copy_from_user); 74 67 ··· 80 83 */ 81 84 __wsum 82 85 csum_and_copy_to_user(const void *src, void __user *dst, 83 - int len, __wsum isum, int *errp) 86 + int len) 84 87 { 85 - __wsum ret; 88 + __wsum ret, isum = ~0U; 89 + int err = 0; 86 90 87 91 might_sleep(); 88 92 89 - if (!user_access_begin(dst, len)) { 90 - *errp = -EFAULT; 93 + if (!user_access_begin(dst, len)) 91 94 return 0; 92 - } 93 95 94 96 if (unlikely((unsigned long)dst & 6)) { 95 97 while (((unsigned long)dst & 6) && len >= 2) { ··· 103 107 } 104 108 } 105 109 106 - *errp = 0; 107 110 ret = csum_partial_copy_generic(src, (void __force *)dst, 108 - len, isum, NULL, errp); 111 + len, isum, NULL, &err); 109 112 user_access_end(); 110 - return ret; 113 + return err ? 0 : ret; 111 114 out: 112 115 user_access_end(); 113 - *errp = -EFAULT; 114 - return isum; 116 + return 0; 115 117 } 116 118 EXPORT_SYMBOL(csum_and_copy_to_user); 117 119
-23
arch/x86/um/asm/checksum_32.h
··· 35 35 return csum_fold(sum); 36 36 } 37 37 38 - /* 39 - * Copy and checksum to user 40 - */ 41 - #define HAVE_CSUM_COPY_USER 42 - static __inline__ __wsum csum_and_copy_to_user(const void *src, 43 - void __user *dst, 44 - int len, __wsum sum, int *err_ptr) 45 - { 46 - if (access_ok(dst, len)) { 47 - if (copy_to_user(dst, src, len)) { 48 - *err_ptr = -EFAULT; 49 - return (__force __wsum)-1; 50 - } 51 - 52 - return csum_partial(src, len, sum); 53 - } 54 - 55 - if (len) 56 - *err_ptr = -EFAULT; 57 - 58 - return (__force __wsum)-1; /* invalid checksum */ 59 - } 60 - 61 38 #endif
+16 -14
arch/xtensa/include/asm/checksum.h
··· 55 55 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 56 56 static inline 57 57 __wsum csum_and_copy_from_user(const void __user *src, void *dst, 58 - int len, __wsum sum, int *err_ptr) 58 + int len) 59 59 { 60 - if (access_ok(src, len)) 61 - return csum_partial_copy_generic((__force const void *)src, dst, 62 - len, sum, err_ptr, NULL); 63 - if (len) 64 - *err_ptr = -EFAULT; 65 - return sum; 60 + int err = 0; 61 + 62 + if (!access_ok(src, len)) 63 + return 0; 64 + 65 + sum = csum_partial_copy_generic((__force const void *)src, dst, 66 + len, ~0U, &err, NULL); 67 + return err ? 0 : sum; 66 68 } 67 69 68 70 /* ··· 245 243 */ 246 244 #define HAVE_CSUM_COPY_USER 247 245 static __inline__ __wsum csum_and_copy_to_user(const void *src, 248 - void __user *dst, int len, 249 - __wsum sum, int *err_ptr) 246 + void __user *dst, int len) 250 247 { 251 - if (access_ok(dst, len)) 252 - return csum_partial_copy_generic(src,dst,len,sum,NULL,err_ptr); 248 + int err = 0; 249 + __wsum sum = ~0U; 253 250 254 - if (len) 255 - *err_ptr = -EFAULT; 251 + if (!access_ok(dst, len)) 252 + return 0; 256 253 257 - return (__force __wsum)-1; /* invalid checksum */ 254 + sum = csum_partial_copy_generic(src,dst,len,sum,NULL,&err); 255 + return err ? 0 : sum; 258 256 } 259 257 #endif
+6 -9
include/net/checksum.h
··· 24 24 #ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 25 25 static inline 26 26 __wsum csum_and_copy_from_user (const void __user *src, void *dst, 27 - int len, __wsum sum, int *err_ptr) 27 + int len) 28 28 { 29 29 if (copy_from_user(dst, src, len)) 30 - *err_ptr = -EFAULT; 31 - return csum_partial(dst, len, sum); 30 + return 0; 31 + return csum_partial(dst, len, ~0U); 32 32 } 33 33 #endif 34 34 35 35 #ifndef HAVE_CSUM_COPY_USER 36 36 static __inline__ __wsum csum_and_copy_to_user 37 - (const void *src, void __user *dst, int len, __wsum sum, int *err_ptr) 37 + (const void *src, void __user *dst, int len) 38 38 { 39 - sum = csum_partial(src, len, sum); 39 + __wsum sum = csum_partial(src, len, ~0U); 40 40 41 41 if (copy_to_user(dst, src, len) == 0) 42 42 return sum; 43 - if (len) 44 - *err_ptr = -EFAULT; 45 - 46 - return (__force __wsum)-1; /* invalid checksum */ 43 + return 0; 47 44 } 48 45 #endif 49 46
+8 -11
lib/iov_iter.c
··· 1449 1449 return 0; 1450 1450 } 1451 1451 iterate_and_advance(i, bytes, v, ({ 1452 - int err = 0; 1453 1452 next = csum_and_copy_from_user(v.iov_base, 1454 1453 (to += v.iov_len) - v.iov_len, 1455 - v.iov_len, ~0U, &err); 1456 - if (!err) { 1454 + v.iov_len); 1455 + if (next) { 1457 1456 sum = csum_block_add(sum, next, off); 1458 1457 off += v.iov_len; 1459 1458 } 1460 - err ? v.iov_len : 0; 1459 + next ? 0 : v.iov_len; 1461 1460 }), ({ 1462 1461 char *p = kmap_atomic(v.bv_page); 1463 1462 sum = csum_and_memcpy((to += v.bv_len) - v.bv_len, ··· 1490 1491 if (unlikely(i->count < bytes)) 1491 1492 return false; 1492 1493 iterate_all_kinds(i, bytes, v, ({ 1493 - int err = 0; 1494 1494 next = csum_and_copy_from_user(v.iov_base, 1495 1495 (to += v.iov_len) - v.iov_len, 1496 - v.iov_len, ~0U, &err); 1497 - if (err) 1496 + v.iov_len); 1497 + if (!next) 1498 1498 return false; 1499 1499 sum = csum_block_add(sum, next, off); 1500 1500 off += v.iov_len; ··· 1535 1537 return 0; 1536 1538 } 1537 1539 iterate_and_advance(i, bytes, v, ({ 1538 - int err = 0; 1539 1540 next = csum_and_copy_to_user((from += v.iov_len) - v.iov_len, 1540 1541 v.iov_base, 1541 - v.iov_len, ~0U, &err); 1542 - if (!err) { 1542 + v.iov_len); 1543 + if (next) { 1543 1544 sum = csum_block_add(sum, next, off); 1544 1545 off += v.iov_len; 1545 1546 } 1546 - err ? v.iov_len : 0; 1547 + next ? 0 : v.iov_len; 1547 1548 }), ({ 1548 1549 char *p = kmap_atomic(v.bv_page); 1549 1550 sum = csum_and_memcpy(p + v.bv_offset,