Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

x86/extable: Introduce _ASM_EXTABLE_UA for uaccess fixups

Currently, most fixups for attempting to access userspace memory are
handled using _ASM_EXTABLE, which is also used for various other types of
fixups (e.g. safe MSR access, IRET failures, and a bunch of other things).
In order to make it possible to add special safety checks to uaccess fixups
(in particular, checking whether the fault address is actually in
userspace), introduce a new exception table handler ex_handler_uaccess()
and wire it up to all the user access fixups (excluding ones that
already use _ASM_EXTABLE_EX).

Signed-off-by: Jann Horn <jannh@google.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Tested-by: Kees Cook <keescook@chromium.org>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: kernel-hardening@lists.openwall.com
Cc: dvyukov@google.com
Cc: Masami Hiramatsu <mhiramat@kernel.org>
Cc: "Naveen N. Rao" <naveen.n.rao@linux.vnet.ibm.com>
Cc: Anil S Keshavamurthy <anil.s.keshavamurthy@intel.com>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Alexander Viro <viro@zeniv.linux.org.uk>
Cc: linux-fsdevel@vger.kernel.org
Cc: Borislav Petkov <bp@alien8.de>
Link: https://lkml.kernel.org/r/20180828201421.157735-5-jannh@google.com

Authored by Jann Horn; committed by Thomas Gleixner.
75045f77 e3e4d501

+160 -142
+8 -2
arch/x86/include/asm/asm.h
··· 130 130 # define _ASM_EXTABLE(from, to) \ 131 131 _ASM_EXTABLE_HANDLE(from, to, ex_handler_default) 132 132 133 + # define _ASM_EXTABLE_UA(from, to) \ 134 + _ASM_EXTABLE_HANDLE(from, to, ex_handler_uaccess) 135 + 133 136 # define _ASM_EXTABLE_FAULT(from, to) \ 134 137 _ASM_EXTABLE_HANDLE(from, to, ex_handler_fault) 135 138 ··· 168 165 jmp copy_user_handle_tail 169 166 .previous 170 167 171 - _ASM_EXTABLE(100b,103b) 172 - _ASM_EXTABLE(101b,103b) 168 + _ASM_EXTABLE_UA(100b, 103b) 169 + _ASM_EXTABLE_UA(101b, 103b) 173 170 .endm 174 171 175 172 #else ··· 184 181 185 182 # define _ASM_EXTABLE(from, to) \ 186 183 _ASM_EXTABLE_HANDLE(from, to, ex_handler_default) 184 + 185 + # define _ASM_EXTABLE_UA(from, to) \ 186 + _ASM_EXTABLE_HANDLE(from, to, ex_handler_uaccess) 187 187 188 188 # define _ASM_EXTABLE_FAULT(from, to) \ 189 189 _ASM_EXTABLE_HANDLE(from, to, ex_handler_fault)
+1 -1
arch/x86/include/asm/fpu/internal.h
··· 226 226 "3: movl $-2,%[err]\n\t" \ 227 227 "jmp 2b\n\t" \ 228 228 ".popsection\n\t" \ 229 - _ASM_EXTABLE(1b, 3b) \ 229 + _ASM_EXTABLE_UA(1b, 3b) \ 230 230 : [err] "=r" (err) \ 231 231 : "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \ 232 232 : "memory")
+3 -3
arch/x86/include/asm/futex.h
··· 20 20 "3:\tmov\t%3, %1\n" \ 21 21 "\tjmp\t2b\n" \ 22 22 "\t.previous\n" \ 23 - _ASM_EXTABLE(1b, 3b) \ 23 + _ASM_EXTABLE_UA(1b, 3b) \ 24 24 : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \ 25 25 : "i" (-EFAULT), "0" (oparg), "1" (0)) 26 26 ··· 36 36 "4:\tmov\t%5, %1\n" \ 37 37 "\tjmp\t3b\n" \ 38 38 "\t.previous\n" \ 39 - _ASM_EXTABLE(1b, 4b) \ 40 - _ASM_EXTABLE(2b, 4b) \ 39 + _ASM_EXTABLE_UA(1b, 4b) \ 40 + _ASM_EXTABLE_UA(2b, 4b) \ 41 41 : "=&a" (oldval), "=&r" (ret), \ 42 42 "+m" (*uaddr), "=&r" (tem) \ 43 43 : "r" (oparg), "i" (-EFAULT), "1" (0))
+11 -11
arch/x86/include/asm/uaccess.h
··· 198 198 "4: movl %3,%0\n" \ 199 199 " jmp 3b\n" \ 200 200 ".previous\n" \ 201 - _ASM_EXTABLE(1b, 4b) \ 202 - _ASM_EXTABLE(2b, 4b) \ 201 + _ASM_EXTABLE_UA(1b, 4b) \ 202 + _ASM_EXTABLE_UA(2b, 4b) \ 203 203 : "=r" (err) \ 204 204 : "A" (x), "r" (addr), "i" (errret), "0" (err)) 205 205 ··· 340 340 " xorl %%edx,%%edx\n" \ 341 341 " jmp 3b\n" \ 342 342 ".previous\n" \ 343 - _ASM_EXTABLE(1b, 4b) \ 344 - _ASM_EXTABLE(2b, 4b) \ 343 + _ASM_EXTABLE_UA(1b, 4b) \ 344 + _ASM_EXTABLE_UA(2b, 4b) \ 345 345 : "=r" (retval), "=&A"(x) \ 346 346 : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1), \ 347 347 "i" (errret), "0" (retval)); \ ··· 386 386 " xor"itype" %"rtype"1,%"rtype"1\n" \ 387 387 " jmp 2b\n" \ 388 388 ".previous\n" \ 389 - _ASM_EXTABLE(1b, 3b) \ 389 + _ASM_EXTABLE_UA(1b, 3b) \ 390 390 : "=r" (err), ltype(x) \ 391 391 : "m" (__m(addr)), "i" (errret), "0" (err)) 392 392 ··· 398 398 "3: mov %3,%0\n" \ 399 399 " jmp 2b\n" \ 400 400 ".previous\n" \ 401 - _ASM_EXTABLE(1b, 3b) \ 401 + _ASM_EXTABLE_UA(1b, 3b) \ 402 402 : "=r" (err), ltype(x) \ 403 403 : "m" (__m(addr)), "i" (errret), "0" (err)) 404 404 ··· 474 474 "3: mov %3,%0\n" \ 475 475 " jmp 2b\n" \ 476 476 ".previous\n" \ 477 - _ASM_EXTABLE(1b, 3b) \ 477 + _ASM_EXTABLE_UA(1b, 3b) \ 478 478 : "=r"(err) \ 479 479 : ltype(x), "m" (__m(addr)), "i" (errret), "0" (err)) 480 480 ··· 602 602 "3:\tmov %3, %0\n" \ 603 603 "\tjmp 2b\n" \ 604 604 "\t.previous\n" \ 605 - _ASM_EXTABLE(1b, 3b) \ 605 + _ASM_EXTABLE_UA(1b, 3b) \ 606 606 : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ 607 607 : "i" (-EFAULT), "q" (__new), "1" (__old) \ 608 608 : "memory" \ ··· 618 618 "3:\tmov %3, %0\n" \ 619 619 "\tjmp 2b\n" \ 620 620 "\t.previous\n" \ 621 - _ASM_EXTABLE(1b, 3b) \ 621 + _ASM_EXTABLE_UA(1b, 3b) \ 622 622 : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ 623 623 : "i" (-EFAULT), "r" (__new), "1" (__old) \ 624 624 : "memory" \ ··· 634 634 "3:\tmov %3, %0\n" \ 635 635 "\tjmp 2b\n" \ 636 636 "\t.previous\n" \ 637 - _ASM_EXTABLE(1b, 3b) \ 637 + _ASM_EXTABLE_UA(1b, 3b) \ 638 638 : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ 639 639 : "i" (-EFAULT), "r" (__new), "1" (__old) \ 640 640 : "memory" \ ··· 653 653 "3:\tmov %3, %0\n" \ 654 654 "\tjmp 2b\n" \ 655 655 "\t.previous\n" \ 656 - _ASM_EXTABLE(1b, 3b) \ 656 + _ASM_EXTABLE_UA(1b, 3b) \ 657 657 : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ 658 658 : "i" (-EFAULT), "r" (__new), "1" (__old) \ 659 659 : "memory" \
+2 -2
arch/x86/lib/checksum_32.S
··· 273 273 274 274 #define SRC(y...) \ 275 275 9999: y; \ 276 - _ASM_EXTABLE(9999b, 6001f) 276 + _ASM_EXTABLE_UA(9999b, 6001f) 277 277 278 278 #define DST(y...) \ 279 279 9999: y; \ 280 - _ASM_EXTABLE(9999b, 6002f) 280 + _ASM_EXTABLE_UA(9999b, 6002f) 281 281 282 282 #ifndef CONFIG_X86_USE_PPRO_CHECKSUM 283 283
+45 -45
arch/x86/lib/copy_user_64.S
··· 92 92 60: jmp copy_user_handle_tail /* ecx is zerorest also */ 93 93 .previous 94 94 95 - _ASM_EXTABLE(1b,30b) 96 - _ASM_EXTABLE(2b,30b) 97 - _ASM_EXTABLE(3b,30b) 98 - _ASM_EXTABLE(4b,30b) 99 - _ASM_EXTABLE(5b,30b) 100 - _ASM_EXTABLE(6b,30b) 101 - _ASM_EXTABLE(7b,30b) 102 - _ASM_EXTABLE(8b,30b) 103 - _ASM_EXTABLE(9b,30b) 104 - _ASM_EXTABLE(10b,30b) 105 - _ASM_EXTABLE(11b,30b) 106 - _ASM_EXTABLE(12b,30b) 107 - _ASM_EXTABLE(13b,30b) 108 - _ASM_EXTABLE(14b,30b) 109 - _ASM_EXTABLE(15b,30b) 110 - _ASM_EXTABLE(16b,30b) 111 - _ASM_EXTABLE(18b,40b) 112 - _ASM_EXTABLE(19b,40b) 113 - _ASM_EXTABLE(21b,50b) 114 - _ASM_EXTABLE(22b,50b) 95 + _ASM_EXTABLE_UA(1b, 30b) 96 + _ASM_EXTABLE_UA(2b, 30b) 97 + _ASM_EXTABLE_UA(3b, 30b) 98 + _ASM_EXTABLE_UA(4b, 30b) 99 + _ASM_EXTABLE_UA(5b, 30b) 100 + _ASM_EXTABLE_UA(6b, 30b) 101 + _ASM_EXTABLE_UA(7b, 30b) 102 + _ASM_EXTABLE_UA(8b, 30b) 103 + _ASM_EXTABLE_UA(9b, 30b) 104 + _ASM_EXTABLE_UA(10b, 30b) 105 + _ASM_EXTABLE_UA(11b, 30b) 106 + _ASM_EXTABLE_UA(12b, 30b) 107 + _ASM_EXTABLE_UA(13b, 30b) 108 + _ASM_EXTABLE_UA(14b, 30b) 109 + _ASM_EXTABLE_UA(15b, 30b) 110 + _ASM_EXTABLE_UA(16b, 30b) 111 + _ASM_EXTABLE_UA(18b, 40b) 112 + _ASM_EXTABLE_UA(19b, 40b) 113 + _ASM_EXTABLE_UA(21b, 50b) 114 + _ASM_EXTABLE_UA(22b, 50b) 115 115 ENDPROC(copy_user_generic_unrolled) 116 116 EXPORT_SYMBOL(copy_user_generic_unrolled) 117 117 ··· 156 156 jmp copy_user_handle_tail 157 157 .previous 158 158 159 - _ASM_EXTABLE(1b,11b) 160 - _ASM_EXTABLE(3b,12b) 159 + _ASM_EXTABLE_UA(1b, 11b) 160 + _ASM_EXTABLE_UA(3b, 12b) 161 161 ENDPROC(copy_user_generic_string) 162 162 EXPORT_SYMBOL(copy_user_generic_string) 163 163 ··· 189 189 jmp copy_user_handle_tail 190 190 .previous 191 191 192 - _ASM_EXTABLE(1b,12b) 192 + _ASM_EXTABLE_UA(1b, 12b) 193 193 ENDPROC(copy_user_enhanced_fast_string) 194 194 EXPORT_SYMBOL(copy_user_enhanced_fast_string) 195 195 ··· 319 319 jmp copy_user_handle_tail 320 320 .previous 321 321 322 - _ASM_EXTABLE(1b,.L_fixup_4x8b_copy) 323 - _ASM_EXTABLE(2b,.L_fixup_4x8b_copy) 324 - _ASM_EXTABLE(3b,.L_fixup_4x8b_copy) 325 - _ASM_EXTABLE(4b,.L_fixup_4x8b_copy) 326 - _ASM_EXTABLE(5b,.L_fixup_4x8b_copy) 327 - _ASM_EXTABLE(6b,.L_fixup_4x8b_copy) 328 - _ASM_EXTABLE(7b,.L_fixup_4x8b_copy) 329 - _ASM_EXTABLE(8b,.L_fixup_4x8b_copy) 330 - _ASM_EXTABLE(9b,.L_fixup_4x8b_copy) 331 - _ASM_EXTABLE(10b,.L_fixup_4x8b_copy) 332 - _ASM_EXTABLE(11b,.L_fixup_4x8b_copy) 333 - _ASM_EXTABLE(12b,.L_fixup_4x8b_copy) 334 - _ASM_EXTABLE(13b,.L_fixup_4x8b_copy) 335 - _ASM_EXTABLE(14b,.L_fixup_4x8b_copy) 336 - _ASM_EXTABLE(15b,.L_fixup_4x8b_copy) 337 - _ASM_EXTABLE(16b,.L_fixup_4x8b_copy) 338 - _ASM_EXTABLE(20b,.L_fixup_8b_copy) 339 - _ASM_EXTABLE(21b,.L_fixup_8b_copy) 340 - _ASM_EXTABLE(30b,.L_fixup_4b_copy) 341 - _ASM_EXTABLE(31b,.L_fixup_4b_copy) 342 - _ASM_EXTABLE(40b,.L_fixup_1b_copy) 343 - _ASM_EXTABLE(41b,.L_fixup_1b_copy) 322 + _ASM_EXTABLE_UA(1b, .L_fixup_4x8b_copy) 323 + _ASM_EXTABLE_UA(2b, .L_fixup_4x8b_copy) 324 + _ASM_EXTABLE_UA(3b, .L_fixup_4x8b_copy) 325 + _ASM_EXTABLE_UA(4b, .L_fixup_4x8b_copy) 326 + _ASM_EXTABLE_UA(5b, .L_fixup_4x8b_copy) 327 + _ASM_EXTABLE_UA(6b, .L_fixup_4x8b_copy) 328 + _ASM_EXTABLE_UA(7b, .L_fixup_4x8b_copy) 329 + _ASM_EXTABLE_UA(8b, .L_fixup_4x8b_copy) 330 + _ASM_EXTABLE_UA(9b, .L_fixup_4x8b_copy) 331 + _ASM_EXTABLE_UA(10b, .L_fixup_4x8b_copy) 332 + _ASM_EXTABLE_UA(11b, .L_fixup_4x8b_copy) 333 + _ASM_EXTABLE_UA(12b, .L_fixup_4x8b_copy) 334 + _ASM_EXTABLE_UA(13b, .L_fixup_4x8b_copy) 335 + _ASM_EXTABLE_UA(14b, .L_fixup_4x8b_copy) 336 + _ASM_EXTABLE_UA(15b, .L_fixup_4x8b_copy) 337 + _ASM_EXTABLE_UA(16b, .L_fixup_4x8b_copy) 338 + _ASM_EXTABLE_UA(20b, .L_fixup_8b_copy) 339 + _ASM_EXTABLE_UA(21b, .L_fixup_8b_copy) 340 + _ASM_EXTABLE_UA(30b, .L_fixup_4b_copy) 341 + _ASM_EXTABLE_UA(31b, .L_fixup_4b_copy) 342 + _ASM_EXTABLE_UA(40b, .L_fixup_1b_copy) 343 + _ASM_EXTABLE_UA(41b, .L_fixup_1b_copy) 344 344 ENDPROC(__copy_user_nocache) 345 345 EXPORT_SYMBOL(__copy_user_nocache)
+6 -2
arch/x86/lib/csum-copy_64.S
··· 31 31 32 32 .macro source 33 33 10: 34 - _ASM_EXTABLE(10b, .Lbad_source) 34 + _ASM_EXTABLE_UA(10b, .Lbad_source) 35 35 .endm 36 36 37 37 .macro dest 38 38 20: 39 - _ASM_EXTABLE(20b, .Lbad_dest) 39 + _ASM_EXTABLE_UA(20b, .Lbad_dest) 40 40 .endm 41 41 42 + /* 43 + * No _ASM_EXTABLE_UA; this is used for intentional prefetch on a 44 + * potentially unmapped kernel address. 45 + */ 42 46 .macro ignore L=.Lignore 43 47 30: 44 48 _ASM_EXTABLE(30b, \L)
+6 -6
arch/x86/lib/getuser.S
··· 132 132 END(bad_get_user_8) 133 133 #endif 134 134 135 - _ASM_EXTABLE(1b,bad_get_user) 136 - _ASM_EXTABLE(2b,bad_get_user) 137 - _ASM_EXTABLE(3b,bad_get_user) 135 + _ASM_EXTABLE_UA(1b, bad_get_user) 136 + _ASM_EXTABLE_UA(2b, bad_get_user) 137 + _ASM_EXTABLE_UA(3b, bad_get_user) 138 138 #ifdef CONFIG_X86_64 139 - _ASM_EXTABLE(4b,bad_get_user) 139 + _ASM_EXTABLE_UA(4b, bad_get_user) 140 140 #else 141 - _ASM_EXTABLE(4b,bad_get_user_8) 142 - _ASM_EXTABLE(5b,bad_get_user_8) 141 + _ASM_EXTABLE_UA(4b, bad_get_user_8) 142 + _ASM_EXTABLE_UA(5b, bad_get_user_8) 143 143 #endif
+5 -5
arch/x86/lib/putuser.S
··· 94 94 EXIT 95 95 END(bad_put_user) 96 96 97 - _ASM_EXTABLE(1b,bad_put_user) 98 - _ASM_EXTABLE(2b,bad_put_user) 99 - _ASM_EXTABLE(3b,bad_put_user) 100 - _ASM_EXTABLE(4b,bad_put_user) 97 + _ASM_EXTABLE_UA(1b, bad_put_user) 98 + _ASM_EXTABLE_UA(2b, bad_put_user) 99 + _ASM_EXTABLE_UA(3b, bad_put_user) 100 + _ASM_EXTABLE_UA(4b, bad_put_user) 101 101 #ifdef CONFIG_X86_32 102 - _ASM_EXTABLE(5b,bad_put_user) 102 + _ASM_EXTABLE_UA(5b, bad_put_user) 103 103 #endif
+63 -63
arch/x86/lib/usercopy_32.c
··· 47 47 "3: lea 0(%2,%0,4),%0\n" \ 48 48 " jmp 2b\n" \ 49 49 ".previous\n" \ 50 - _ASM_EXTABLE(0b,3b) \ 51 - _ASM_EXTABLE(1b,2b) \ 50 + _ASM_EXTABLE_UA(0b, 3b) \ 51 + _ASM_EXTABLE_UA(1b, 2b) \ 52 52 : "=&c"(size), "=&D" (__d0) \ 53 53 : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \ 54 54 } while (0) ··· 153 153 "101: lea 0(%%eax,%0,4),%0\n" 154 154 " jmp 100b\n" 155 155 ".previous\n" 156 - _ASM_EXTABLE(1b,100b) 157 - _ASM_EXTABLE(2b,100b) 158 - _ASM_EXTABLE(3b,100b) 159 - _ASM_EXTABLE(4b,100b) 160 - _ASM_EXTABLE(5b,100b) 161 - _ASM_EXTABLE(6b,100b) 162 - _ASM_EXTABLE(7b,100b) 163 - _ASM_EXTABLE(8b,100b) 164 - _ASM_EXTABLE(9b,100b) 165 - _ASM_EXTABLE(10b,100b) 166 - _ASM_EXTABLE(11b,100b) 167 - _ASM_EXTABLE(12b,100b) 168 - _ASM_EXTABLE(13b,100b) 169 - _ASM_EXTABLE(14b,100b) 170 - _ASM_EXTABLE(15b,100b) 171 - _ASM_EXTABLE(16b,100b) 172 - _ASM_EXTABLE(17b,100b) 173 - _ASM_EXTABLE(18b,100b) 174 - _ASM_EXTABLE(19b,100b) 175 - _ASM_EXTABLE(20b,100b) 176 - _ASM_EXTABLE(21b,100b) 177 - _ASM_EXTABLE(22b,100b) 178 - _ASM_EXTABLE(23b,100b) 179 - _ASM_EXTABLE(24b,100b) 180 - _ASM_EXTABLE(25b,100b) 181 - _ASM_EXTABLE(26b,100b) 182 - _ASM_EXTABLE(27b,100b) 183 - _ASM_EXTABLE(28b,100b) 184 - _ASM_EXTABLE(29b,100b) 185 - _ASM_EXTABLE(30b,100b) 186 - _ASM_EXTABLE(31b,100b) 187 - _ASM_EXTABLE(32b,100b) 188 - _ASM_EXTABLE(33b,100b) 189 - _ASM_EXTABLE(34b,100b) 190 - _ASM_EXTABLE(35b,100b) 191 - _ASM_EXTABLE(36b,100b) 192 - _ASM_EXTABLE(37b,100b) 193 - _ASM_EXTABLE(99b,101b) 156 + _ASM_EXTABLE_UA(1b, 100b) 157 + _ASM_EXTABLE_UA(2b, 100b) 158 + _ASM_EXTABLE_UA(3b, 100b) 159 + _ASM_EXTABLE_UA(4b, 100b) 160 + _ASM_EXTABLE_UA(5b, 100b) 161 + _ASM_EXTABLE_UA(6b, 100b) 162 + _ASM_EXTABLE_UA(7b, 100b) 163 + _ASM_EXTABLE_UA(8b, 100b) 164 + _ASM_EXTABLE_UA(9b, 100b) 165 + _ASM_EXTABLE_UA(10b, 100b) 166 + _ASM_EXTABLE_UA(11b, 100b) 167 + _ASM_EXTABLE_UA(12b, 100b) 168 + _ASM_EXTABLE_UA(13b, 100b) 169 + _ASM_EXTABLE_UA(14b, 100b) 170 + _ASM_EXTABLE_UA(15b, 100b) 171 + _ASM_EXTABLE_UA(16b, 100b) 172 + _ASM_EXTABLE_UA(17b, 100b) 173 + _ASM_EXTABLE_UA(18b, 100b) 174 + _ASM_EXTABLE_UA(19b, 100b) 175 + _ASM_EXTABLE_UA(20b, 100b) 176 + _ASM_EXTABLE_UA(21b, 100b) 177 + _ASM_EXTABLE_UA(22b, 100b) 178 + _ASM_EXTABLE_UA(23b, 100b) 179 + _ASM_EXTABLE_UA(24b, 100b) 180 + _ASM_EXTABLE_UA(25b, 100b) 181 + _ASM_EXTABLE_UA(26b, 100b) 182 + _ASM_EXTABLE_UA(27b, 100b) 183 + _ASM_EXTABLE_UA(28b, 100b) 184 + _ASM_EXTABLE_UA(29b, 100b) 185 + _ASM_EXTABLE_UA(30b, 100b) 186 + _ASM_EXTABLE_UA(31b, 100b) 187 + _ASM_EXTABLE_UA(32b, 100b) 188 + _ASM_EXTABLE_UA(33b, 100b) 189 + _ASM_EXTABLE_UA(34b, 100b) 190 + _ASM_EXTABLE_UA(35b, 100b) 191 + _ASM_EXTABLE_UA(36b, 100b) 192 + _ASM_EXTABLE_UA(37b, 100b) 193 + _ASM_EXTABLE_UA(99b, 101b) 194 194 : "=&c"(size), "=&D" (d0), "=&S" (d1) 195 195 : "1"(to), "2"(from), "0"(size) 196 196 : "eax", "edx", "memory"); ··· 259 259 "9: lea 0(%%eax,%0,4),%0\n" 260 260 "16: jmp 8b\n" 261 261 ".previous\n" 262 - _ASM_EXTABLE(0b,16b) 263 - _ASM_EXTABLE(1b,16b) 264 - _ASM_EXTABLE(2b,16b) 265 - _ASM_EXTABLE(21b,16b) 266 - _ASM_EXTABLE(3b,16b) 267 - _ASM_EXTABLE(31b,16b) 268 - _ASM_EXTABLE(4b,16b) 269 - _ASM_EXTABLE(41b,16b) 270 - _ASM_EXTABLE(10b,16b) 271 - _ASM_EXTABLE(51b,16b) 272 - _ASM_EXTABLE(11b,16b) 273 - _ASM_EXTABLE(61b,16b) 274 - _ASM_EXTABLE(12b,16b) 275 - _ASM_EXTABLE(71b,16b) 276 - _ASM_EXTABLE(13b,16b) 277 - _ASM_EXTABLE(81b,16b) 278 - _ASM_EXTABLE(14b,16b) 279 - _ASM_EXTABLE(91b,16b) 280 - _ASM_EXTABLE(6b,9b) 281 - _ASM_EXTABLE(7b,16b) 262 + _ASM_EXTABLE_UA(0b, 16b) 263 + _ASM_EXTABLE_UA(1b, 16b) 264 + _ASM_EXTABLE_UA(2b, 16b) 265 + _ASM_EXTABLE_UA(21b, 16b) 266 + _ASM_EXTABLE_UA(3b, 16b) 267 + _ASM_EXTABLE_UA(31b, 16b) 268 + _ASM_EXTABLE_UA(4b, 16b) 269 + _ASM_EXTABLE_UA(41b, 16b) 270 + _ASM_EXTABLE_UA(10b, 16b) 271 + _ASM_EXTABLE_UA(51b, 16b) 272 + _ASM_EXTABLE_UA(11b, 16b) 273 + _ASM_EXTABLE_UA(61b, 16b) 274 + _ASM_EXTABLE_UA(12b, 16b) 275 + _ASM_EXTABLE_UA(71b, 16b) 276 + _ASM_EXTABLE_UA(13b, 16b) 277 + _ASM_EXTABLE_UA(81b, 16b) 278 + _ASM_EXTABLE_UA(14b, 16b) 279 + _ASM_EXTABLE_UA(91b, 16b) 280 + _ASM_EXTABLE_UA(6b, 9b) 281 + _ASM_EXTABLE_UA(7b, 16b) 282 282 : "=&c"(size), "=&D" (d0), "=&S" (d1) 283 283 : "1"(to), "2"(from), "0"(size) 284 284 : "eax", "edx", "memory"); ··· 321 321 "3: lea 0(%3,%0,4),%0\n" \ 322 322 " jmp 2b\n" \ 323 323 ".previous\n" \ 324 - _ASM_EXTABLE(4b,5b) \ 325 - _ASM_EXTABLE(0b,3b) \ 326 - _ASM_EXTABLE(1b,2b) \ 324 + _ASM_EXTABLE_UA(4b, 5b) \ 325 + _ASM_EXTABLE_UA(0b, 3b) \ 326 + _ASM_EXTABLE_UA(1b, 2b) \ 327 327 : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \ 328 328 : "3"(size), "0"(size), "1"(to), "2"(from) \ 329 329 : "memory"); \
+2 -2
arch/x86/lib/usercopy_64.c
··· 37 37 "3: lea 0(%[size1],%[size8],8),%[size8]\n" 38 38 " jmp 2b\n" 39 39 ".previous\n" 40 - _ASM_EXTABLE(0b,3b) 41 - _ASM_EXTABLE(1b,2b) 40 + _ASM_EXTABLE_UA(0b, 3b) 41 + _ASM_EXTABLE_UA(1b, 2b) 42 42 : [size8] "=&c"(size), [dst] "=&D" (__d0) 43 43 : [size1] "r"(size & 7), "[size8]" (size / 8), "[dst]"(addr)); 44 44 clac();
+8
arch/x86/mm/extable.c
··· 108 108 } 109 109 EXPORT_SYMBOL_GPL(ex_handler_fprestore); 110 110 111 + __visible bool ex_handler_uaccess(const struct exception_table_entry *fixup, 112 + struct pt_regs *regs, int trapnr) 113 + { 114 + regs->ip = ex_fixup_addr(fixup); 115 + return true; 116 + } 117 + EXPORT_SYMBOL(ex_handler_uaccess); 118 + 111 119 __visible bool ex_handler_ext(const struct exception_table_entry *fixup, 112 120 struct pt_regs *regs, int trapnr) 113 121 {