Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

ARM: 8322/1: keep .text and .fixup regions closer together

This moves all fixup snippets to the .text.fixup section, which is
a special section that gets emitted along with the .text section
for each input object file, i.e., the snippets are kept much closer
to the code they refer to, which helps prevent linker failure on
large kernels.

Acked-by: Nicolas Pitre <nico@linaro.org>
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>

Authored by Ard Biesheuvel; committed by Russell King.
c4a84ae3 779c88c9

+17 -20
+1 -1
arch/arm/include/asm/futex.h
··· 13 13 " .align 3\n" \ 14 14 " .long 1b, 4f, 2b, 4f\n" \ 15 15 " .popsection\n" \ 16 - " .pushsection .fixup,\"ax\"\n" \ 16 + " .pushsection .text.fixup,\"ax\"\n" \ 17 17 " .align 2\n" \ 18 18 "4: mov %0, " err_reg "\n" \ 19 19 " b 3b\n" \
+5 -5
arch/arm/include/asm/uaccess.h
··· 315 315 __asm__ __volatile__( \ 316 316 "1: " TUSER(ldrb) " %1,[%2],#0\n" \ 317 317 "2:\n" \ 318 - " .pushsection .fixup,\"ax\"\n" \ 318 + " .pushsection .text.fixup,\"ax\"\n" \ 319 319 " .align 2\n" \ 320 320 "3: mov %0, %3\n" \ 321 321 " mov %1, #0\n" \ ··· 351 351 __asm__ __volatile__( \ 352 352 "1: " TUSER(ldr) " %1,[%2],#0\n" \ 353 353 "2:\n" \ 354 - " .pushsection .fixup,\"ax\"\n" \ 354 + " .pushsection .text.fixup,\"ax\"\n" \ 355 355 " .align 2\n" \ 356 356 "3: mov %0, %3\n" \ 357 357 " mov %1, #0\n" \ ··· 397 397 __asm__ __volatile__( \ 398 398 "1: " TUSER(strb) " %1,[%2],#0\n" \ 399 399 "2:\n" \ 400 - " .pushsection .fixup,\"ax\"\n" \ 400 + " .pushsection .text.fixup,\"ax\"\n" \ 401 401 " .align 2\n" \ 402 402 "3: mov %0, %3\n" \ 403 403 " b 2b\n" \ ··· 430 430 __asm__ __volatile__( \ 431 431 "1: " TUSER(str) " %1,[%2],#0\n" \ 432 432 "2:\n" \ 433 - " .pushsection .fixup,\"ax\"\n" \ 433 + " .pushsection .text.fixup,\"ax\"\n" \ 434 434 " .align 2\n" \ 435 435 "3: mov %0, %3\n" \ 436 436 " b 2b\n" \ ··· 458 458 THUMB( "1: " TUSER(str) " " __reg_oper1 ", [%1]\n" ) \ 459 459 THUMB( "2: " TUSER(str) " " __reg_oper0 ", [%1, #4]\n" ) \ 460 460 "3:\n" \ 461 - " .pushsection .fixup,\"ax\"\n" \ 461 + " .pushsection .text.fixup,\"ax\"\n" \ 462 462 " .align 2\n" \ 463 463 "4: mov %0, %3\n" \ 464 464 " b 3b\n" \
+1 -1
arch/arm/include/asm/word-at-a-time.h
··· 71 71 asm( 72 72 "1: ldr %0, [%2]\n" 73 73 "2:\n" 74 - " .pushsection .fixup,\"ax\"\n" 74 + " .pushsection .text.fixup,\"ax\"\n" 75 75 " .align 2\n" 76 76 "3: and %1, %2, #0x3\n" 77 77 " bic %2, %2, #0x3\n"
+1 -1
arch/arm/kernel/entry-armv.S
··· 545 545 /* 546 546 * The out of line fixup for the ldrt instructions above. 547 547 */ 548 - .pushsection .fixup, "ax" 548 + .pushsection .text.fixup, "ax" 549 549 .align 2 550 550 4: str r4, [sp, #S_PC] @ retry current instruction 551 551 ret r9
+1 -1
arch/arm/kernel/swp_emulate.c
··· 42 42 " cmp %0, #0\n" \ 43 43 " movne %0, %4\n" \ 44 44 "2:\n" \ 45 - " .section .fixup,\"ax\"\n" \ 45 + " .section .text.fixup,\"ax\"\n" \ 46 46 " .align 2\n" \ 47 47 "3: mov %0, %5\n" \ 48 48 " b 2b\n" \
+1 -4
arch/arm/kernel/vmlinux.lds.S
··· 74 74 ARM_EXIT_DISCARD(EXIT_DATA) 75 75 EXIT_CALL 76 76 #ifndef CONFIG_MMU 77 - *(.fixup) 77 + *(.text.fixup) 78 78 *(__ex_table) 79 79 #endif 80 80 #ifndef CONFIG_SMP_ON_UP ··· 109 109 SCHED_TEXT 110 110 LOCK_TEXT 111 111 KPROBES_TEXT 112 - #ifdef CONFIG_MMU 113 - *(.fixup) 114 - #endif 115 112 *(.gnu.warning) 116 113 *(.glue_7) 117 114 *(.glue_7t)
+1 -1
arch/arm/lib/clear_user.S
··· 47 47 ENDPROC(__clear_user) 48 48 ENDPROC(__clear_user_std) 49 49 50 - .pushsection .fixup,"ax" 50 + .pushsection .text.fixup,"ax" 51 51 .align 0 52 52 9001: ldmfd sp!, {r0, pc} 53 53 .popsection
+1 -1
arch/arm/lib/copy_to_user.S
··· 100 100 ENDPROC(__copy_to_user) 101 101 ENDPROC(__copy_to_user_std) 102 102 103 - .pushsection .fixup,"ax" 103 + .pushsection .text.fixup,"ax" 104 104 .align 0 105 105 copy_abort_preamble 106 106 ldmfd sp!, {r1, r2, r3}
+1 -1
arch/arm/lib/csumpartialcopyuser.S
··· 68 68 * so properly, we would have to add in whatever registers were loaded before 69 69 * the fault, which, with the current asm above is not predictable. 70 70 */ 71 - .pushsection .fixup,"ax" 71 + .pushsection .text.fixup,"ax" 72 72 .align 4 73 73 9001: mov r4, #-EFAULT 74 74 ldr r5, [sp, #8*4] @ *err_ptr
+3 -3
arch/arm/mm/alignment.c
··· 201 201 THUMB( "1: "ins" %1, [%2]\n" ) \ 202 202 THUMB( " add %2, %2, #1\n" ) \ 203 203 "2:\n" \ 204 - " .pushsection .fixup,\"ax\"\n" \ 204 + " .pushsection .text.fixup,\"ax\"\n" \ 205 205 " .align 2\n" \ 206 206 "3: mov %0, #1\n" \ 207 207 " b 2b\n" \ ··· 261 261 " mov %1, %1, "NEXT_BYTE"\n" \ 262 262 "2: "ins" %1, [%2]\n" \ 263 263 "3:\n" \ 264 - " .pushsection .fixup,\"ax\"\n" \ 264 + " .pushsection .text.fixup,\"ax\"\n" \ 265 265 " .align 2\n" \ 266 266 "4: mov %0, #1\n" \ 267 267 " b 3b\n" \ ··· 301 301 " mov %1, %1, "NEXT_BYTE"\n" \ 302 302 "4: "ins" %1, [%2]\n" \ 303 303 "5:\n" \ 304 - " .pushsection .fixup,\"ax\"\n" \ 304 + " .pushsection .text.fixup,\"ax\"\n" \ 305 305 " .align 2\n" \ 306 306 "6: mov %0, #1\n" \ 307 307 " b 5b\n" \
+1 -1
arch/arm/nwfpe/entry.S
··· 113 113 @ to fault. Emit the appropriate exception gunk to fix things up. 114 114 @ ??? For some reason, faults can happen at .Lx2 even with a 115 115 @ plain LDR instruction. Weird, but it seems harmless. 116 - .pushsection .fixup,"ax" 116 + .pushsection .text.fixup,"ax" 117 117 .align 2 118 118 .Lfix: ret r9 @ let the user eat segfaults 119 119 .popsection