Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

x86/copy_user_64: Remove .fixup usage

Place the anonymous .fixup code at the tail of the regular functions.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Reviewed-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211110101325.068505810@infradead.org

+11 -21
arch/x86/lib/copy_user_64.S
··· 32 32 decl %ecx 33 33 jnz 100b 34 34 102: 35 - .section .fixup,"ax" 36 - 103: addl %ecx,%edx /* ecx is zerorest also */ 37 - jmp .Lcopy_user_handle_tail 38 - .previous 39 35 40 - _ASM_EXTABLE_CPY(100b, 103b) 41 - _ASM_EXTABLE_CPY(101b, 103b) 42 - .endm 36 + _ASM_EXTABLE_CPY(100b, .Lcopy_user_handle_align) 37 + _ASM_EXTABLE_CPY(101b, .Lcopy_user_handle_align) 38 + .endm 43 39 44 40 /* 45 41 * copy_user_generic_unrolled - memory copy with exception handling. ··· 103 107 ASM_CLAC 104 108 RET 105 109 106 - .section .fixup,"ax" 107 110 30: shll $6,%ecx 108 111 addl %ecx,%edx 109 112 jmp 60f ··· 110 115 jmp 60f 111 116 50: movl %ecx,%edx 112 117 60: jmp .Lcopy_user_handle_tail /* ecx is zerorest also */ 113 - .previous 114 118 115 119 _ASM_EXTABLE_CPY(1b, 30b) 116 120 _ASM_EXTABLE_CPY(2b, 30b) ··· 160 166 movl %edx,%ecx 161 167 shrl $3,%ecx 162 168 andl $7,%edx 163 - 1: rep 164 - movsq 169 + 1: rep movsq 165 170 2: movl %edx,%ecx 166 - 3: rep 167 - movsb 171 + 3: rep movsb 168 172 xorl %eax,%eax 169 173 ASM_CLAC 170 174 RET 171 175 172 - .section .fixup,"ax" 173 176 11: leal (%rdx,%rcx,8),%ecx 174 177 12: movl %ecx,%edx /* ecx is zerorest also */ 175 178 jmp .Lcopy_user_handle_tail 176 - .previous 177 179 178 180 _ASM_EXTABLE_CPY(1b, 11b) 179 181 _ASM_EXTABLE_CPY(3b, 12b) ··· 193 203 cmpl $64,%edx 194 204 jb .L_copy_short_string /* less then 64 bytes, avoid the costly 'rep' */ 195 205 movl %edx,%ecx 196 - 1: rep 197 - movsb 206 + 1: rep movsb 198 207 xorl %eax,%eax 199 208 ASM_CLAC 200 209 RET 201 210 202 - .section .fixup,"ax" 203 211 12: movl %ecx,%edx /* ecx is zerorest also */ 204 212 jmp .Lcopy_user_handle_tail 205 - .previous 206 213 207 214 _ASM_EXTABLE_CPY(1b, 12b) 208 215 SYM_FUNC_END(copy_user_enhanced_fast_string) ··· 227 240 RET 228 241 229 242 _ASM_EXTABLE_CPY(1b, 2b) 243 + 244 + .Lcopy_user_handle_align: 245 + addl %ecx,%edx /* ecx is zerorest also */ 246 + jmp .Lcopy_user_handle_tail 247 + 230 248 SYM_CODE_END(.Lcopy_user_handle_tail) 231 249 232 250 
/* ··· 342 350 sfence 343 351 RET 344 352 345 - .section .fixup,"ax" 346 353 .L_fixup_4x8b_copy: 347 354 shll $6,%ecx 348 355 addl %ecx,%edx ··· 357 366 .L_fixup_handle_tail: 358 367 sfence 359 368 jmp .Lcopy_user_handle_tail 360 - .previous 361 369 362 370 _ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy) 363 371 _ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)