Linux kernel mirror (for testing): git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git

arm64: lib: Use modern annotations for assembly functions

In an effort to clarify and simplify the annotation of assembly functions
in the kernel, new macros have been introduced. These replace ENTRY and
ENDPROC and also add a new annotation for static functions, which previously
had no ENTRY equivalent. Update the annotations in the library code to the
new macros.

Signed-off-by: Mark Brown <broonie@kernel.org>
[will: Use SYM_FUNC_START_WEAK_PI]
Signed-off-by: Will Deacon <will@kernel.org>

Authored by Mark Brown, committed by Will Deacon
3ac0f452 35e61c77
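
For reference, the conversion is mechanical: each opening and closing annotation is swapped for its modern equivalent from include/linux/linkage.h. A minimal sketch of the before/after pattern, using a hypothetical my_func symbol rather than code from this patch:

  /* Old style */
  ENTRY(my_func)              // declare a global function entry point
      ret
  ENDPROC(my_func)            // set the symbol's type and size

  /* New style */
  SYM_FUNC_START(my_func)     // declare a global function entry point
      ret
  SYM_FUNC_END(my_func)       // set the symbol's type and size

  /* New: file-local (static) functions, which previously had no
     ENTRY() equivalent, now get their own annotation. */
  SYM_FUNC_START_LOCAL(my_local_func)
      ret
  SYM_FUNC_END(my_local_func)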

19 files changed, +50 -50
arch/arm64/lib/clear_page.S (+2 -2)
···
  * Parameters:
  * x0 - dest
  */
- ENTRY(clear_page)
+ SYM_FUNC_START(clear_page)
  mrs x1, dczid_el0
  and w1, w1, #0xf
  mov x2, #4
···
  tst x0, #(PAGE_SIZE - 1)
  b.ne 1b
  ret
- ENDPROC(clear_page)
+ SYM_FUNC_END(clear_page)
  EXPORT_SYMBOL(clear_page)
arch/arm64/lib/clear_user.S (+2 -2)
···
  *
  * Alignment fixed up by hardware.
  */
- ENTRY(__arch_clear_user)
+ SYM_FUNC_START(__arch_clear_user)
  mov x2, x1 // save the size for fixup return
  subs x1, x1, #8
  b.mi 2f
···
  uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
  5: mov x0, #0
  ret
- ENDPROC(__arch_clear_user)
+ SYM_FUNC_END(__arch_clear_user)
  EXPORT_SYMBOL(__arch_clear_user)

  .section .fixup,"ax"
arch/arm64/lib/copy_from_user.S (+2 -2)
···
  .endm

  end .req x5
- ENTRY(__arch_copy_from_user)
+ SYM_FUNC_START(__arch_copy_from_user)
  add end, x0, x2
  #include "copy_template.S"
  mov x0, #0 // Nothing to copy
  ret
- ENDPROC(__arch_copy_from_user)
+ SYM_FUNC_END(__arch_copy_from_user)
  EXPORT_SYMBOL(__arch_copy_from_user)

  .section .fixup,"ax"
arch/arm64/lib/copy_in_user.S (+2 -2)
···

  end .req x5

- ENTRY(__arch_copy_in_user)
+ SYM_FUNC_START(__arch_copy_in_user)
  add end, x0, x2
  #include "copy_template.S"
  mov x0, #0
  ret
- ENDPROC(__arch_copy_in_user)
+ SYM_FUNC_END(__arch_copy_in_user)
  EXPORT_SYMBOL(__arch_copy_in_user)

  .section .fixup,"ax"
arch/arm64/lib/copy_page.S (+2 -2)
···
  * x0 - dest
  * x1 - src
  */
- ENTRY(copy_page)
+ SYM_FUNC_START(copy_page)
  alternative_if ARM64_HAS_NO_HW_PREFETCH
  // Prefetch three cache lines ahead.
  prfm pldl1strm, [x1, #128]
···
  stnp x16, x17, [x0, #112]

  ret
- ENDPROC(copy_page)
+ SYM_FUNC_END(copy_page)
  EXPORT_SYMBOL(copy_page)
arch/arm64/lib/copy_to_user.S (+2 -2)
···
  .endm

  end .req x5
- ENTRY(__arch_copy_to_user)
+ SYM_FUNC_START(__arch_copy_to_user)
  add end, x0, x2
  #include "copy_template.S"
  mov x0, #0
  ret
- ENDPROC(__arch_copy_to_user)
+ SYM_FUNC_END(__arch_copy_to_user)
  EXPORT_SYMBOL(__arch_copy_to_user)

  .section .fixup,"ax"
arch/arm64/lib/crc32.S (+4 -4)
···
  .endm

  .align 5
- ENTRY(crc32_le)
+ SYM_FUNC_START(crc32_le)
  alternative_if_not ARM64_HAS_CRC32
  b crc32_le_base
  alternative_else_nop_endif
  __crc32
- ENDPROC(crc32_le)
+ SYM_FUNC_END(crc32_le)

  .align 5
- ENTRY(__crc32c_le)
+ SYM_FUNC_START(__crc32c_le)
  alternative_if_not ARM64_HAS_CRC32
  b __crc32c_le_base
  alternative_else_nop_endif
  __crc32 c
- ENDPROC(__crc32c_le)
+ SYM_FUNC_END(__crc32c_le)
arch/arm64/lib/memchr.S (+2 -2)
···
  * Returns:
  * x0 - address of first occurrence of 'c' or 0
  */
- WEAK(memchr)
+ SYM_FUNC_START_WEAK_PI(memchr)
  and w1, w1, #0xff
  1: subs x2, x2, #1
  b.mi 2f
···
  ret
  2: mov x0, #0
  ret
- ENDPIPROC(memchr)
+ SYM_FUNC_END_PI(memchr)
  EXPORT_SYMBOL_NOKASAN(memchr)
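
The _PI ("position independent") variants seen here also emit a __pi_-prefixed alias for each routine, so it can be called before the kernel's virtual address map is set up (for example from the EFI stub). A sketch of how these wrappers compose the generic macros, based on arch/arm64/include/asm/assembler.h from around this time (paraphrased, so the exact upstream definitions may differ slightly):

  /* Open the __pi_ alias, then the primary symbol. */
  #define SYM_FUNC_START_PI(x)            \
          SYM_FUNC_START_ALIAS(__pi_##x); \
          SYM_FUNC_START(x)

  /* Same, but the primary symbol is weak so it can be overridden. */
  #define SYM_FUNC_START_WEAK_PI(x)       \
          SYM_FUNC_START_ALIAS(__pi_##x); \
          SYM_FUNC_START_WEAK(x)

  /* Close the primary symbol, then the alias. */
  #define SYM_FUNC_END_PI(x)              \
          SYM_FUNC_END(x);                \
          SYM_FUNC_END_ALIAS(__pi_##x)

This is why WEAK(memchr)/ENDPIPROC(memchr) maps to SYM_FUNC_START_WEAK_PI/SYM_FUNC_END_PI rather than to the plain weak variant.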
arch/arm64/lib/memcmp.S (+2 -2)
···
  limit_wd .req x12
  mask .req x13

- WEAK(memcmp)
+ SYM_FUNC_START_WEAK_PI(memcmp)
  cbz limit, .Lret0
  eor tmp1, src1, src2
  tst tmp1, #7
···
  .Lret0:
  mov result, #0
  ret
- ENDPIPROC(memcmp)
+ SYM_FUNC_END_PI(memcmp)
  EXPORT_SYMBOL_NOKASAN(memcmp)
arch/arm64/lib/memcpy.S (+4 -4)
···
  .endm

  .weak memcpy
- ENTRY(__memcpy)
- ENTRY(memcpy)
+ SYM_FUNC_START_ALIAS(__memcpy)
+ SYM_FUNC_START_PI(memcpy)
  #include "copy_template.S"
  ret
- ENDPIPROC(memcpy)
+ SYM_FUNC_END_PI(memcpy)
  EXPORT_SYMBOL(memcpy)
- ENDPROC(__memcpy)
+ SYM_FUNC_END_ALIAS(__memcpy)
  EXPORT_SYMBOL(__memcpy)
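
memcpy, memmove and memset each carry a second, double-underscore entry point: the old code simply stacked two ENTRY() lines, while the new code opens __memcpy as an alias and memcpy as the position-independent primary symbol, closing them in reverse order. Under the generic linkage.h macros the alias forms differ from the normal ones mainly in that they annotate an additional name over the same code; a rough, paraphrased sketch (not the verbatim upstream definitions):

  #define SYM_FUNC_START(name)       SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
  #define SYM_FUNC_START_ALIAS(name) SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
  #define SYM_FUNC_END(name)         SYM_END(name, SYM_T_FUNC)
  #define SYM_FUNC_END_ALIAS(name)   SYM_END(name, SYM_T_FUNC)

The nesting order matters: the alias is opened first and closed last, so both symbols cover the whole function body.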
arch/arm64/lib/memmove.S (+4 -4)
···
  D_h .req x14

  .weak memmove
- ENTRY(__memmove)
- ENTRY(memmove)
+ SYM_FUNC_START_ALIAS(__memmove)
+ SYM_FUNC_START_PI(memmove)
  cmp dstin, src
  b.lo __memcpy
  add tmp1, src, count
···
  tst count, #0x3f
  b.ne .Ltail63
  ret
- ENDPIPROC(memmove)
+ SYM_FUNC_END_PI(memmove)
  EXPORT_SYMBOL(memmove)
- ENDPROC(__memmove)
+ SYM_FUNC_END_ALIAS(__memmove)
  EXPORT_SYMBOL(__memmove)
arch/arm64/lib/memset.S (+4 -4)
···
  tmp3 .req x9

  .weak memset
- ENTRY(__memset)
- ENTRY(memset)
+ SYM_FUNC_START_ALIAS(__memset)
+ SYM_FUNC_START_PI(memset)
  mov dst, dstin /* Preserve return value. */
  and A_lw, val, #255
  orr A_lw, A_lw, A_lw, lsl #8
···
  ands count, count, zva_bits_x
  b.ne .Ltail_maybe_long
  ret
- ENDPIPROC(memset)
+ SYM_FUNC_END_PI(memset)
  EXPORT_SYMBOL(memset)
- ENDPROC(__memset)
+ SYM_FUNC_END_ALIAS(__memset)
  EXPORT_SYMBOL(__memset)
arch/arm64/lib/strchr.S (+2 -2)
···
  * Returns:
  * x0 - address of first occurrence of 'c' or 0
  */
- WEAK(strchr)
+ SYM_FUNC_START_WEAK(strchr)
  and w1, w1, #0xff
  1: ldrb w2, [x0], #1
  cmp w2, w1
···
  cmp w2, w1
  csel x0, x0, xzr, eq
  ret
- ENDPROC(strchr)
+ SYM_FUNC_END(strchr)
  EXPORT_SYMBOL_NOKASAN(strchr)
arch/arm64/lib/strcmp.S (+2 -2)
···
  zeroones .req x10
  pos .req x11

- WEAK(strcmp)
+ SYM_FUNC_START_WEAK_PI(strcmp)
  eor tmp1, src1, src2
  mov zeroones, #REP8_01
  tst tmp1, #7
···
  lsr data1, data1, #56
  sub result, data1, data2, lsr #56
  ret
- ENDPIPROC(strcmp)
+ SYM_FUNC_END_PI(strcmp)
  EXPORT_SYMBOL_NOKASAN(strcmp)
arch/arm64/lib/strlen.S (+2 -2)
···
  #define REP8_7f 0x7f7f7f7f7f7f7f7f
  #define REP8_80 0x8080808080808080

- WEAK(strlen)
+ SYM_FUNC_START_WEAK_PI(strlen)
  mov zeroones, #REP8_01
  bic src, srcin, #15
  ands tmp1, srcin, #15
···
  csinv data1, data1, xzr, le
  csel data2, data2, data2a, le
  b .Lrealigned
- ENDPIPROC(strlen)
+ SYM_FUNC_END_PI(strlen)
  EXPORT_SYMBOL_NOKASAN(strlen)
arch/arm64/lib/strncmp.S (+2 -2)
···
  mask .req x14
  endloop .req x15

- WEAK(strncmp)
+ SYM_FUNC_START_WEAK_PI(strncmp)
  cbz limit, .Lret0
  eor tmp1, src1, src2
  mov zeroones, #REP8_01
···
  .Lret0:
  mov result, #0
  ret
- ENDPIPROC(strncmp)
+ SYM_FUNC_END_PI(strncmp)
  EXPORT_SYMBOL_NOKASAN(strncmp)
arch/arm64/lib/strnlen.S (+2 -2)
···
  #define REP8_7f 0x7f7f7f7f7f7f7f7f
  #define REP8_80 0x8080808080808080

- WEAK(strnlen)
+ SYM_FUNC_START_WEAK_PI(strnlen)
  cbz limit, .Lhit_limit
  mov zeroones, #REP8_01
  bic src, srcin, #15
···
  .Lhit_limit:
  mov len, limit
  ret
- ENDPIPROC(strnlen)
+ SYM_FUNC_END_PI(strnlen)
  EXPORT_SYMBOL_NOKASAN(strnlen)
arch/arm64/lib/strrchr.S (+2 -2)
···
  * Returns:
  * x0 - address of last occurrence of 'c' or 0
  */
- WEAK(strrchr)
+ SYM_FUNC_START_WEAK_PI(strrchr)
  mov x3, #0
  and w1, w1, #0xff
  1: ldrb w2, [x0], #1
···
  b 1b
  2: mov x0, x3
  ret
- ENDPIPROC(strrchr)
+ SYM_FUNC_END_PI(strrchr)
  EXPORT_SYMBOL_NOKASAN(strrchr)
arch/arm64/lib/tishift.S (+6 -6)
···

  #include <asm/assembler.h>

- ENTRY(__ashlti3)
+ SYM_FUNC_START(__ashlti3)
  cbz x2, 1f
  mov x3, #64
  sub x3, x3, x2
···
  lsl x1, x0, x1
  mov x0, x2
  ret
- ENDPROC(__ashlti3)
+ SYM_FUNC_END(__ashlti3)
  EXPORT_SYMBOL(__ashlti3)

- ENTRY(__ashrti3)
+ SYM_FUNC_START(__ashrti3)
  cbz x2, 1f
  mov x3, #64
  sub x3, x3, x2
···
  asr x0, x1, x0
  mov x1, x2
  ret
- ENDPROC(__ashrti3)
+ SYM_FUNC_END(__ashrti3)
  EXPORT_SYMBOL(__ashrti3)

- ENTRY(__lshrti3)
+ SYM_FUNC_START(__lshrti3)
  cbz x2, 1f
  mov x3, #64
  sub x3, x3, x2
···
  lsr x0, x1, x0
  mov x1, x2
  ret
- ENDPROC(__lshrti3)
+ SYM_FUNC_END(__lshrti3)
  EXPORT_SYMBOL(__lshrti3)