Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

put the remnants of ..._user_ret() to rest

they hadn't been used in the last 15 years...

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>

Al Viro f5e6634e bb646cdb

-130
-6
arch/blackfin/include/asm/uaccess.h
··· 168 168 #define __copy_to_user_inatomic __copy_to_user 169 169 #define __copy_from_user_inatomic __copy_from_user 170 170 171 - #define copy_to_user_ret(to, from, n, retval) ({ if (copy_to_user(to, from, n))\ 172 - return retval; }) 173 - 174 - #define copy_from_user_ret(to, from, n, retval) ({ if (copy_from_user(to, from, n))\ 175 - return retval; }) 176 - 177 171 static inline unsigned long __must_check 178 172 copy_from_user(void *to, const void __user *from, unsigned long n) 179 173 {
-4
arch/m68k/include/asm/uaccess_no.h
··· 135 135 #define __copy_to_user_inatomic __copy_to_user 136 136 #define __copy_from_user_inatomic __copy_from_user 137 137 138 - #define copy_to_user_ret(to,from,n,retval) ({ if (copy_to_user(to,from,n)) return retval; }) 139 - 140 - #define copy_from_user_ret(to,from,n,retval) ({ if (copy_from_user(to,from,n)) return retval; }) 141 - 142 138 /* 143 139 * Copy a null terminated string from userspace. 144 140 */
-15
arch/mn10300/include/asm/uaccess.h
··· 110 110 #define __put_user(x, ptr) __put_user_nocheck((x), (ptr), sizeof(*(ptr))) 111 111 #define __get_user(x, ptr) __get_user_nocheck((x), (ptr), sizeof(*(ptr))) 112 112 113 - /* 114 - * The "xxx_ret" versions return constant specified in third argument, if 115 - * something bad happens. These macros can be optimized for the 116 - * case of just returning from the function xxx_ret is used. 117 - */ 118 - 119 - #define put_user_ret(x, ptr, ret) \ 120 - ({ if (put_user((x), (ptr))) return (ret); }) 121 - #define get_user_ret(x, ptr, ret) \ 122 - ({ if (get_user((x), (ptr))) return (ret); }) 123 - #define __put_user_ret(x, ptr, ret) \ 124 - ({ if (__put_user((x), (ptr))) return (ret); }) 125 - #define __get_user_ret(x, ptr, ret) \ 126 - ({ if (__get_user((x), (ptr))) return (ret); }) 127 - 128 113 struct __large_struct { unsigned long buf[100]; }; 129 114 #define __m(x) (*(struct __large_struct *)(x)) 130 115
-65
arch/sparc/include/asm/uaccess_32.h
··· 205 205 __gu_ret; \ 206 206 }) 207 207 208 - #define __get_user_check_ret(x, addr, size, type, retval) ({ \ 209 - register unsigned long __gu_val __asm__ ("l1"); \ 210 - if (__access_ok(addr, size)) { \ 211 - switch (size) { \ 212 - case 1: \ 213 - __get_user_asm_ret(__gu_val, ub, addr, retval); \ 214 - break; \ 215 - case 2: \ 216 - __get_user_asm_ret(__gu_val, uh, addr, retval); \ 217 - break; \ 218 - case 4: \ 219 - __get_user_asm_ret(__gu_val, , addr, retval); \ 220 - break; \ 221 - case 8: \ 222 - __get_user_asm_ret(__gu_val, d, addr, retval); \ 223 - break; \ 224 - default: \ 225 - if (__get_user_bad()) \ 226 - return retval; \ 227 - } \ 228 - x = (__force type) __gu_val; \ 229 - } else \ 230 - return retval; \ 231 - }) 232 - 233 208 #define __get_user_nocheck(x, addr, size, type) ({ \ 234 209 register int __gu_ret; \ 235 210 register unsigned long __gu_val; \ ··· 220 245 } \ 221 246 x = (__force type) __gu_val; \ 222 247 __gu_ret; \ 223 - }) 224 - 225 - #define __get_user_nocheck_ret(x, addr, size, type, retval) ({ \ 226 - register unsigned long __gu_val __asm__ ("l1"); \ 227 - switch (size) { \ 228 - case 1: __get_user_asm_ret(__gu_val, ub, addr, retval); break; \ 229 - case 2: __get_user_asm_ret(__gu_val, uh, addr, retval); break; \ 230 - case 4: __get_user_asm_ret(__gu_val, , addr, retval); break; \ 231 - case 8: __get_user_asm_ret(__gu_val, d, addr, retval); break; \ 232 - default: \ 233 - if (__get_user_bad()) \ 234 - return retval; \ 235 - } \ 236 - x = (__force type) __gu_val; \ 237 248 }) 238 249 239 250 #define __get_user_asm(x, size, addr, ret) \ ··· 241 280 ".previous\n\t" \ 242 281 : "=&r" (ret), "=&r" (x) : "m" (*__m(addr)), \ 243 282 "i" (-EFAULT)) 244 - 245 - #define __get_user_asm_ret(x, size, addr, retval) \ 246 - if (__builtin_constant_p(retval) && retval == -EFAULT) \ 247 - __asm__ __volatile__( \ 248 - "/* Get user asm ret, inline. */\n" \ 249 - "1:\t" "ld"#size " %1, %0\n\n\t" \ 250 - ".section __ex_table,#alloc\n\t" \ 251 - ".align 4\n\t" \ 252 - ".word 1b,__ret_efault\n\n\t" \ 253 - ".previous\n\t" \ 254 - : "=&r" (x) : "m" (*__m(addr))); \ 255 - else \ 256 - __asm__ __volatile__( \ 257 - "/* Get user asm ret, inline. */\n" \ 258 - "1:\t" "ld"#size " %1, %0\n\n\t" \ 259 - ".section .fixup,#alloc,#execinstr\n\t" \ 260 - ".align 4\n" \ 261 - "3:\n\t" \ 262 - "ret\n\t" \ 263 - " restore %%g0, %2, %%o0\n\n\t" \ 264 - ".previous\n\t" \ 265 - ".section __ex_table,#alloc\n\t" \ 266 - ".align 4\n\t" \ 267 - ".word 1b, 3b\n\n\t" \ 268 - ".previous\n\t" \ 269 - : "=&r" (x) : "m" (*__m(addr)), "i" (retval)) 270 283 271 284 int __get_user_bad(void); 272 285
-40
arch/sparc/include/asm/uaccess_64.h
··· 179 179 __gu_ret; \ 180 180 }) 181 181 182 - #define __get_user_nocheck_ret(data, addr, size, type, retval) ({ \ 183 - register unsigned long __gu_val __asm__ ("l1"); \ 184 - switch (size) { \ 185 - case 1: __get_user_asm_ret(__gu_val, ub, addr, retval); break; \ 186 - case 2: __get_user_asm_ret(__gu_val, uh, addr, retval); break; \ 187 - case 4: __get_user_asm_ret(__gu_val, uw, addr, retval); break; \ 188 - case 8: __get_user_asm_ret(__gu_val, x, addr, retval); break; \ 189 - default: \ 190 - if (__get_user_bad()) \ 191 - return retval; \ 192 - } \ 193 - data = (__force type) __gu_val; \ 194 - }) 195 - 196 182 #define __get_user_asm(x, size, addr, ret) \ 197 183 __asm__ __volatile__( \ 198 184 "/* Get user asm, inline. */\n" \ ··· 199 213 ".previous\n\t" \ 200 214 : "=r" (ret), "=r" (x) : "r" (__m(addr)), \ 201 215 "i" (-EFAULT)) 202 - 203 - #define __get_user_asm_ret(x, size, addr, retval) \ 204 - if (__builtin_constant_p(retval) && retval == -EFAULT) \ 205 - __asm__ __volatile__( \ 206 - "/* Get user asm ret, inline. */\n" \ 207 - "1:\t" "ld"#size "a [%1] %%asi, %0\n\n\t" \ 208 - ".section __ex_table,\"a\"\n\t" \ 209 - ".align 4\n\t" \ 210 - ".word 1b,__ret_efault\n\n\t" \ 211 - ".previous\n\t" \ 212 - : "=r" (x) : "r" (__m(addr))); \ 213 - else \ 214 - __asm__ __volatile__( \ 215 - "/* Get user asm ret, inline. */\n" \ 216 - "1:\t" "ld"#size "a [%1] %%asi, %0\n\n\t" \ 217 - ".section .fixup,#alloc,#execinstr\n\t" \ 218 - ".align 4\n" \ 219 - "3:\n\t" \ 220 - "ret\n\t" \ 221 - " restore %%g0, %2, %%o0\n\n\t" \ 222 - ".previous\n\t" \ 223 - ".section __ex_table,\"a\"\n\t" \ 224 - ".align 4\n\t" \ 225 - ".word 1b, 3b\n\n\t" \ 226 - ".previous\n\t" \ 227 - : "=r" (x) : "r" (__m(addr)), "i" (retval)) 228 216 229 217 int __get_user_bad(void); 230 218