/*
 * NOTE(review): This is not a compilable assembly file. It appears to be
 * a copy-paste of a side-by-side diff viewer's rendering of
 * arch/x86/lib/memcpy_64.S (Linux kernel), with the old/new line-number
 * columns fused into the text (e.g. "2020+ENTRY(__memcpy)" means
 * old line 20 / new line 20 / added line) and several hunks elided
 * ("···").
 *
 * What the visible change does (per the added lines below):
 *  - memcpy/__memcpy now start with an ALTERNATIVE_2 dispatch:
 *    default is "jmp memcpy_orig"; on X86_FEATURE_REP_GOOD CPUs the jmp
 *    is NOPped out so execution falls into the inline REP MOVSQ path;
 *    on X86_FEATURE_ERMS CPUs it becomes "jmp memcpy_erms" (REP MOVSB).
 *  - The old hand-rolled .altinstr_replacement stubs (.Lmemcpy_c,
 *    .Lmemcpy_c_e) and the manual .altinstructions entries at the end
 *    are removed; memcpy_erms and memcpy_orig become real symbols
 *    (ENTRY/ENDPROC). Presumably this is the kernel's ALTERNATIVE_2
 *    conversion of memcpy_64.S — verify against kernel git history.
 *
 * Because the middle of the REP MOVSQ body and the whole unrolled body
 * of memcpy_orig are elided ("···"), neither the pre- nor post-change
 * file can be reconstructed from this fragment. Recover the real file
 * from the kernel source tree rather than from this residue. The
 * original bytes are preserved unmodified below.
 */
···11/* Copyright 2002 Andi Kleen */2233#include <linux/linkage.h>44-54#include <asm/cpufeature.h>65#include <asm/dwarf2.h>76#include <asm/alternative-asm.h>77+88+/*99+ * We build a jump to memcpy_orig by default which gets NOPped out on1010+ * the majority of x86 CPUs which set REP_GOOD. In addition, CPUs which1111+ * have the enhanced REP MOVSB/STOSB feature (ERMS), change those NOPs1212+ * to a jmp to memcpy_erms which does the REP; MOVSB mem copy.1313+ */1414+1515+.weak memcpy816917/*1018 * memcpy - Copy a memory block.···2517 * Output:2618 * rax original destination2719 */2020+ENTRY(__memcpy)2121+ENTRY(memcpy)2222+	ALTERNATIVE_2 "jmp memcpy_orig", "", X86_FEATURE_REP_GOOD, \2323+		"jmp memcpy_erms", X86_FEATURE_ERMS28242929-/*3030- * memcpy_c() - fast string ops (REP MOVSQ) based variant.3131- *3232- * This gets patched over the unrolled variant (below) via the3333- * alternative instructions framework:3434- */3535-	.section .altinstr_replacement, "ax", @progbits3636-.Lmemcpy_c:3725	movq %rdi, %rax3826	movq %rdx, %rcx3927	shrq $3, %rcx···3834	movl %edx, %ecx3935	rep movsb4036	ret4141-.Lmemcpy_e:4242-	.previous3737+ENDPROC(memcpy)3838+ENDPROC(__memcpy)43394440/*4545- * memcpy_c_e() - enhanced fast string memcpy. This is faster and simpler than4646- * memcpy_c. Use memcpy_c_e when possible.4747- *4848- * This gets patched over the unrolled variant (below) via the4949- * alternative instructions framework:4141+ * memcpy_erms() - enhanced fast string memcpy. This is faster and4242+ * simpler than memcpy. 
Use memcpy_erms when possible.5043 */5151-	.section .altinstr_replacement, "ax", @progbits5252-.Lmemcpy_c_e:4444+ENTRY(memcpy_erms)5345	movq %rdi, %rax5446	movq %rdx, %rcx5547	rep movsb5648	ret5757-.Lmemcpy_e_e:5858-	.previous4949+ENDPROC(memcpy_erms)59506060-.weak memcpy6161-6262-ENTRY(__memcpy)6363-ENTRY(memcpy)5151+ENTRY(memcpy_orig)6452	CFI_STARTPROC6553	movq %rdi, %rax6654···179183.Lend:180184	retq181185	CFI_ENDPROC182182-ENDPROC(memcpy)183183-ENDPROC(__memcpy)184184-185185-	/*186186-	 * Some CPUs are adding enhanced REP MOVSB/STOSB feature187187-	 * If the feature is supported, memcpy_c_e() is the first choice.188188-	 * If enhanced rep movsb copy is not available, use fast string copy189189-	 * memcpy_c() when possible. This is faster and code is simpler than190190-	 * original memcpy().191191-	 * Otherwise, original memcpy() is used.192192-	 * In .altinstructions section, ERMS feature is placed after REG_GOOD193193-	 * feature to implement the right patch order.194194-	 *195195-	 * Replace only beginning, memcpy is used to apply alternatives,196196-	 * so it is silly to overwrite itself with nops - reboot is the197197-	 * only outcome...198198-	 */199199-	.section .altinstructions, "a"200200-	altinstruction_entry __memcpy,.Lmemcpy_c,X86_FEATURE_REP_GOOD,\201201-	.Lmemcpy_e-.Lmemcpy_c,.Lmemcpy_e-.Lmemcpy_c,0202202-	altinstruction_entry __memcpy,.Lmemcpy_c_e,X86_FEATURE_ERMS, \203203-	.Lmemcpy_e_e-.Lmemcpy_c_e,.Lmemcpy_e_e-.Lmemcpy_c_e,0204204-	.previous186186+ENDPROC(memcpy_orig)