Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

sparc64: Define SPARC default __fls function

__fls will now require boot-time patching on T4 and above.
Redefining it under arch/sparc/lib.

Signed-off-by: Vijay Kumar <vijay.ac.kumar@oracle.com>
Signed-off-by: David S. Miller <davem@davemloft.net>

Authored by Vijay Kumar and committed by David S. Miller.
be52bbe3 41413a60

+63 -1
+1 -1
arch/sparc/include/asm/bitops_64.h
··· 23 23 void change_bit(unsigned long nr, volatile unsigned long *addr); 24 24 25 25 int fls(unsigned int word); 26 + int __fls(unsigned long word); 26 27 27 28 #include <asm-generic/bitops/non-atomic.h> 28 29 29 - #include <asm-generic/bitops/__fls.h> 30 30 #include <asm-generic/bitops/fls64.h> 31 31 32 32 #ifdef __KERNEL__
+1
arch/sparc/lib/Makefile
··· 17 17 lib-$(CONFIG_SPARC32) += muldi3.o bitext.o cmpdi2.o 18 18 lib-$(CONFIG_SPARC64) += multi3.o 19 19 lib-$(CONFIG_SPARC64) += fls.o 20 + lib-$(CONFIG_SPARC64) += fls64.o 20 21 21 22 lib-$(CONFIG_SPARC64) += copy_page.o clear_page.o bzero.o 22 23 lib-$(CONFIG_SPARC64) += csum_copy.o csum_copy_from_user.o csum_copy_to_user.o
+61
arch/sparc/lib/fls64.S
/* fls64.S: SPARC default __fls definition.
 *
 * SPARC default __fls definition, which follows the same algorithm as
 * in generic __fls(). This function will be boot time patched on T4
 * and onward.
 */

#include <linux/linkage.h>
#include <asm/export.h>

	.text
	.register	%g2, #scratch
	.register	%g3, #scratch

	/* unsigned long __fls(unsigned long word)
	 *
	 * In:   %o0 = word (as with generic __fls(), result for word == 0
	 *       is not meaningful — callers guarantee a nonzero word)
	 * Out:  %o0 = bit index (0..63) of the most significant set bit
	 * Uses: %g1 = candidate bit index, %g2/%g3 = scratch masks
	 *
	 * Binary search, mirroring the generic C __fls(): each round tests
	 * whether any bit is set in the top part of the current window; if
	 * not, the word is shifted up and the candidate index reduced.
	 * NOTE(review): the brnz,pt branches are not annulled, so their
	 * delay slots execute on BOTH paths — rounds 3, 4 and 5 rely on the
	 * previous round's delay slot having pre-loaded -1 into %g2/%g3.
	 */
ENTRY(__fls)
	mov	-1, %g2
	sllx	%g2, 32, %g2		/* %g2 = 0xffffffff00000000	      */
	and	%o0, %g2, %g2		/* any bit set in the upper 32?       */
	brnz,pt	%g2, 1f
	 mov	63, %g1			/* delay: index starts at 63	      */
	sllx	%o0, 32, %o0		/* no: move low half up for testing   */
	mov	31, %g1			/*     answer must be <= 31	      */
1:
	mov	-1, %g2
	sllx	%g2, 48, %g2		/* mask = top 16 of the window	      */
	and	%o0, %g2, %g2
	brnz,pt	%g2, 2f
	 mov	-1, %g2			/* delay: reload mask for next round  */
	sllx	%o0, 16, %o0
	add	%g1, -16, %g1
2:
	mov	-1, %g2
	sllx	%g2, 56, %g2		/* mask = top 8		      */
	and	%o0, %g2, %g2
	brnz,pt	%g2, 3f
	 mov	-1, %g2			/* delay: %g2 = -1 on both paths      */
	sllx	%o0, 8, %o0
	add	%g1, -8, %g1
3:
	sllx	%g2, 60, %g2		/* top 4 (%g2 was -1 via delay slot)  */
	and	%o0, %g2, %g2
	brnz,pt	%g2, 4f
	 mov	-1, %g2
	sllx	%o0, 4, %o0
	add	%g1, -4, %g1
4:
	sllx	%g2, 62, %g2		/* top 2 (%g2 was -1 via delay slot)  */
	and	%o0, %g2, %g2
	brnz,pt	%g2, 5f
	 mov	-1, %g3			/* delay: mask seed for final round   */
	sllx	%o0, 2, %o0
	add	%g1, -2, %g1
5:
	mov	0, %g2
	sllx	%g3, 63, %g3		/* %g3 = 1 << 63		      */
	and	%o0, %g3, %o0		/* isolate the top remaining bit      */
	movre	%o0, 1, %g2		/* %g2 = 1 iff that bit is clear      */
	sub	%g1, %g2, %g1
	jmp	%o7+8			/* return */
	 sra	%g1, 0, %o0		/* delay: result (sign-extended) in %o0 */
ENDPROC(__fls)
EXPORT_SYMBOL(__fls)