Linux kernel mirror (for testing): git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
Tags: kernel, os, linux

sparc: move exports to definitions

Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>

Author: Al Viro — abbreviated revision hashes d3867f04, 9445aa1a

Total diffstat for this commit: +141 -265
+1
arch/sparc/include/asm/Kbuild
··· 6 6 generic-y += div64.h 7 7 generic-y += emergency-restart.h 8 8 generic-y += exec.h 9 + generic-y += export.h 9 10 generic-y += irq_regs.h 10 11 generic-y += irq_work.h 11 12 generic-y += linkage.h
+1 -1
arch/sparc/kernel/Makefile
··· 86 86 obj-$(CONFIG_SUN_PM) += apc.o pmc.o 87 87 88 88 obj-$(CONFIG_MODULES) += module.o 89 - obj-$(CONFIG_MODULES) += sparc_ksyms_$(BITS).o 89 + obj-$(CONFIG_MODULES) += sparc_ksyms.o 90 90 obj-$(CONFIG_SPARC_LED) += led.o 91 91 obj-$(CONFIG_KGDB) += kgdb_$(BITS).o 92 92
+3
arch/sparc/kernel/entry.S
··· 29 29 #include <asm/unistd.h> 30 30 31 31 #include <asm/asmmacro.h> 32 + #include <asm/export.h> 32 33 33 34 #define curptr g6 34 35 ··· 1208 1207 1209 1208 ret 1210 1209 restore 1210 + EXPORT_SYMBOL(__udelay) 1211 + EXPORT_SYMBOL(__ndelay) 1211 1212 1212 1213 /* Handle a software breakpoint */ 1213 1214 /* We have to inform parent that child has stopped */
+3
arch/sparc/kernel/head_32.S
··· 24 24 #include <asm/thread_info.h> /* TI_UWINMASK */ 25 25 #include <asm/errno.h> 26 26 #include <asm/pgtsrmmu.h> /* SRMMU_PGDIR_SHIFT */ 27 + #include <asm/export.h> 27 28 28 29 .data 29 30 /* The following are used with the prom_vector node-ops to figure out ··· 61 60 */ 62 61 .globl empty_zero_page 63 62 empty_zero_page: .skip PAGE_SIZE 63 + EXPORT_SYMBOL(empty_zero_page) 64 64 65 65 .global root_flags 66 66 .global ram_flags ··· 815 813 __ret_efault: 816 814 ret 817 815 restore %g0, -EFAULT, %o0 816 + EXPORT_SYMBOL(__ret_efault)
+6 -1
arch/sparc/kernel/head_64.S
··· 32 32 #include <asm/estate.h> 33 33 #include <asm/sfafsr.h> 34 34 #include <asm/unistd.h> 35 - 35 + #include <asm/export.h> 36 + 36 37 /* This section from from _start to sparc64_boot_end should fit into 37 38 * 0x0000000000404000 to 0x0000000000408000. 38 39 */ ··· 144 143 .skip 64 145 144 prom_root_node: 146 145 .word 0 146 + EXPORT_SYMBOL(prom_root_node) 147 147 prom_mmu_ihandle_cache: 148 148 .word 0 149 149 prom_boot_mapped_pc: ··· 160 158 .word 0 161 159 sun4v_chip_type: 162 160 .word SUN4V_CHIP_INVALID 161 + EXPORT_SYMBOL(sun4v_chip_type) 163 162 1: 164 163 rd %pc, %l0 165 164 ··· 923 920 .globl prom_tba, tlb_type 924 921 prom_tba: .xword 0 925 922 tlb_type: .word 0 /* Must NOT end up in BSS */ 923 + EXPORT_SYMBOL(tlb_type) 926 924 .section ".fixup",#alloc,#execinstr 927 925 928 926 .globl __ret_efault, __retl_efault, __ret_one, __retl_one ··· 931 927 ret 932 928 restore %g0, -EFAULT, %o0 933 929 ENDPROC(__ret_efault) 930 + EXPORT_SYMBOL(__ret_efault) 934 931 935 932 ENTRY(__retl_efault) 936 933 retl
+2
arch/sparc/kernel/helpers.S
··· 15 15 2: retl 16 16 nop 17 17 .size __flushw_user,.-__flushw_user 18 + EXPORT_SYMBOL(__flushw_user) 18 19 19 20 /* Flush %fp and %i7 to the stack for all register 20 21 * windows active inside of the cpu. This allows ··· 62 61 .size hard_smp_processor_id,.-hard_smp_processor_id 63 62 #endif 64 63 .size real_hard_smp_processor_id,.-real_hard_smp_processor_id 64 + EXPORT_SYMBOL_GPL(real_hard_smp_processor_id)
+5
arch/sparc/kernel/hvcalls.S
··· 343 343 0: retl 344 344 nop 345 345 ENDPROC(sun4v_mach_set_watchdog) 346 + EXPORT_SYMBOL(sun4v_mach_set_watchdog) 346 347 347 348 /* No inputs and does not return. */ 348 349 ENTRY(sun4v_mach_sir) ··· 777 776 retl 778 777 nop 779 778 ENDPROC(sun4v_niagara_getperf) 779 + EXPORT_SYMBOL(sun4v_niagara_getperf) 780 780 781 781 ENTRY(sun4v_niagara_setperf) 782 782 mov HV_FAST_SET_PERFREG, %o5 ··· 785 783 retl 786 784 nop 787 785 ENDPROC(sun4v_niagara_setperf) 786 + EXPORT_SYMBOL(sun4v_niagara_setperf) 788 787 789 788 ENTRY(sun4v_niagara2_getperf) 790 789 mov %o0, %o4 ··· 795 792 retl 796 793 nop 797 794 ENDPROC(sun4v_niagara2_getperf) 795 + EXPORT_SYMBOL(sun4v_niagara2_getperf) 798 796 799 797 ENTRY(sun4v_niagara2_setperf) 800 798 mov HV_FAST_N2_SET_PERFREG, %o5 ··· 803 799 retl 804 800 nop 805 801 ENDPROC(sun4v_niagara2_setperf) 802 + EXPORT_SYMBOL(sun4v_niagara2_setperf) 806 803 807 804 ENTRY(sun4v_reboot_data_set) 808 805 mov HV_FAST_REBOOT_DATA_SET, %o5
+12
arch/sparc/kernel/sparc_ksyms.c
··· 1 + /* 2 + * arch/sparc/kernel/ksyms.c: Sparc specific ksyms support. 3 + * 4 + * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu) 5 + * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) 6 + */ 7 + 8 + #include <linux/init.h> 9 + #include <linux/export.h> 10 + 11 + /* This is needed only for drivers/sbus/char/openprom.c */ 12 + EXPORT_SYMBOL(saved_command_line);
-31
arch/sparc/kernel/sparc_ksyms_32.c
··· 1 - /* 2 - * arch/sparc/kernel/ksyms.c: Sparc specific ksyms support. 3 - * 4 - * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu) 5 - * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) 6 - */ 7 - 8 - #include <linux/module.h> 9 - 10 - #include <asm/pgtable.h> 11 - #include <asm/uaccess.h> 12 - #include <asm/delay.h> 13 - #include <asm/head.h> 14 - #include <asm/dma.h> 15 - 16 - struct poll { 17 - int fd; 18 - short events; 19 - short revents; 20 - }; 21 - 22 - /* from entry.S */ 23 - EXPORT_SYMBOL(__udelay); 24 - EXPORT_SYMBOL(__ndelay); 25 - 26 - /* from head_32.S */ 27 - EXPORT_SYMBOL(__ret_efault); 28 - EXPORT_SYMBOL(empty_zero_page); 29 - 30 - /* Exporting a symbol from /init/main.c */ 31 - EXPORT_SYMBOL(saved_command_line);
-53
arch/sparc/kernel/sparc_ksyms_64.c
··· 1 - /* arch/sparc64/kernel/sparc64_ksyms.c: Sparc64 specific ksyms support. 2 - * 3 - * Copyright (C) 1996, 2007 David S. Miller (davem@davemloft.net) 4 - * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) 5 - * Copyright (C) 1999 Jakub Jelinek (jj@ultra.linux.cz) 6 - */ 7 - 8 - #include <linux/export.h> 9 - #include <linux/pci.h> 10 - #include <linux/bitops.h> 11 - 12 - #include <asm/cpudata.h> 13 - #include <asm/uaccess.h> 14 - #include <asm/spitfire.h> 15 - #include <asm/oplib.h> 16 - #include <asm/hypervisor.h> 17 - #include <asm/cacheflush.h> 18 - 19 - struct poll { 20 - int fd; 21 - short events; 22 - short revents; 23 - }; 24 - 25 - /* from helpers.S */ 26 - EXPORT_SYMBOL(__flushw_user); 27 - EXPORT_SYMBOL_GPL(real_hard_smp_processor_id); 28 - 29 - /* from head_64.S */ 30 - EXPORT_SYMBOL(__ret_efault); 31 - EXPORT_SYMBOL(tlb_type); 32 - EXPORT_SYMBOL(sun4v_chip_type); 33 - EXPORT_SYMBOL(prom_root_node); 34 - 35 - /* from hvcalls.S */ 36 - EXPORT_SYMBOL(sun4v_niagara_getperf); 37 - EXPORT_SYMBOL(sun4v_niagara_setperf); 38 - EXPORT_SYMBOL(sun4v_niagara2_getperf); 39 - EXPORT_SYMBOL(sun4v_niagara2_setperf); 40 - EXPORT_SYMBOL(sun4v_mach_set_watchdog); 41 - 42 - /* from hweight.S */ 43 - EXPORT_SYMBOL(__arch_hweight8); 44 - EXPORT_SYMBOL(__arch_hweight16); 45 - EXPORT_SYMBOL(__arch_hweight32); 46 - EXPORT_SYMBOL(__arch_hweight64); 47 - 48 - /* from ffs_ffz.S */ 49 - EXPORT_SYMBOL(ffs); 50 - EXPORT_SYMBOL(__ffs); 51 - 52 - /* Exporting a symbol from /init/main.c */ 53 - EXPORT_SYMBOL(saved_command_line);
-1
arch/sparc/lib/Makefile
··· 43 43 44 44 obj-$(CONFIG_SPARC64) += iomap.o 45 45 obj-$(CONFIG_SPARC32) += atomic32.o ucmpdi2.o 46 - obj-y += ksyms.o 47 46 obj-$(CONFIG_SPARC64) += PeeCeeI.o
+2
arch/sparc/lib/U1memcpy.S
··· 7 7 #ifdef __KERNEL__ 8 8 #include <asm/visasm.h> 9 9 #include <asm/asi.h> 10 + #include <asm/export.h> 10 11 #define GLOBAL_SPARE g7 11 12 #else 12 13 #define GLOBAL_SPARE g5 ··· 568 567 mov EX_RETVAL(%o4), %o0 569 568 570 569 .size FUNC_NAME, .-FUNC_NAME 570 + EXPORT_SYMBOL(FUNC_NAME)
+2
arch/sparc/lib/VISsave.S
··· 13 13 #include <asm/ptrace.h> 14 14 #include <asm/visasm.h> 15 15 #include <asm/thread_info.h> 16 + #include <asm/export.h> 16 17 17 18 /* On entry: %o5=current FPRS value, %g7 is callers address */ 18 19 /* May clobber %o5, %g1, %g2, %g3, %g7, %icc, %xcc */ ··· 80 79 80: jmpl %g7 + %g0, %g0 81 80 nop 82 81 ENDPROC(VISenter) 82 + EXPORT_SYMBOL(VISenter)
+2
arch/sparc/lib/ashldi3.S
··· 6 6 */ 7 7 8 8 #include <linux/linkage.h> 9 + #include <asm/export.h> 9 10 10 11 .text 11 12 ENTRY(__ashldi3) ··· 34 33 retl 35 34 nop 36 35 ENDPROC(__ashldi3) 36 + EXPORT_SYMBOL(__ashldi3)
+2
arch/sparc/lib/ashrdi3.S
··· 6 6 */ 7 7 8 8 #include <linux/linkage.h> 9 + #include <asm/export.h> 9 10 10 11 .text 11 12 ENTRY(__ashrdi3) ··· 36 35 jmpl %o7 + 8, %g0 37 36 nop 38 37 ENDPROC(__ashrdi3) 38 + EXPORT_SYMBOL(__ashrdi3)
+12 -4
arch/sparc/lib/atomic_64.S
··· 6 6 #include <linux/linkage.h> 7 7 #include <asm/asi.h> 8 8 #include <asm/backoff.h> 9 + #include <asm/export.h> 9 10 10 11 .text 11 12 ··· 30 29 nop; \ 31 30 2: BACKOFF_SPIN(%o2, %o3, 1b); \ 32 31 ENDPROC(atomic_##op); \ 32 + EXPORT_SYMBOL(atomic_##op); 33 33 34 34 #define ATOMIC_OP_RETURN(op) \ 35 35 ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \ ··· 44 42 retl; \ 45 43 sra %g1, 0, %o0; \ 46 44 2: BACKOFF_SPIN(%o2, %o3, 1b); \ 47 - ENDPROC(atomic_##op##_return); 45 + ENDPROC(atomic_##op##_return); \ 46 + EXPORT_SYMBOL(atomic_##op##_return); 48 47 49 48 #define ATOMIC_FETCH_OP(op) \ 50 49 ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \ ··· 59 56 retl; \ 60 57 sra %g1, 0, %o0; \ 61 58 2: BACKOFF_SPIN(%o2, %o3, 1b); \ 62 - ENDPROC(atomic_fetch_##op); 59 + ENDPROC(atomic_fetch_##op); \ 60 + EXPORT_SYMBOL(atomic_fetch_##op); 63 61 64 62 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op) 65 63 ··· 92 88 nop; \ 93 89 2: BACKOFF_SPIN(%o2, %o3, 1b); \ 94 90 ENDPROC(atomic64_##op); \ 91 + EXPORT_SYMBOL(atomic64_##op); 95 92 96 93 #define ATOMIC64_OP_RETURN(op) \ 97 94 ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \ ··· 106 101 retl; \ 107 102 op %g1, %o0, %o0; \ 108 103 2: BACKOFF_SPIN(%o2, %o3, 1b); \ 109 - ENDPROC(atomic64_##op##_return); 104 + ENDPROC(atomic64_##op##_return); \ 105 + EXPORT_SYMBOL(atomic64_##op##_return); 110 106 111 107 #define ATOMIC64_FETCH_OP(op) \ 112 108 ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \ ··· 121 115 retl; \ 122 116 mov %g1, %o0; \ 123 117 2: BACKOFF_SPIN(%o2, %o3, 1b); \ 124 - ENDPROC(atomic64_fetch_##op); 118 + ENDPROC(atomic64_fetch_##op); \ 119 + EXPORT_SYMBOL(atomic64_fetch_##op); 125 120 126 121 #define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op) 127 122 ··· 154 147 sub %g1, 1, %o0 155 148 2: BACKOFF_SPIN(%o2, %o3, 1b) 156 149 ENDPROC(atomic64_dec_if_positive) 150 + 
EXPORT_SYMBOL(atomic64_dec_if_positive)
+7
arch/sparc/lib/bitops.S
··· 6 6 #include <linux/linkage.h> 7 7 #include <asm/asi.h> 8 8 #include <asm/backoff.h> 9 + #include <asm/export.h> 9 10 10 11 .text 11 12 ··· 30 29 nop 31 30 2: BACKOFF_SPIN(%o3, %o4, 1b) 32 31 ENDPROC(test_and_set_bit) 32 + EXPORT_SYMBOL(test_and_set_bit) 33 33 34 34 ENTRY(test_and_clear_bit) /* %o0=nr, %o1=addr */ 35 35 BACKOFF_SETUP(%o3) ··· 52 50 nop 53 51 2: BACKOFF_SPIN(%o3, %o4, 1b) 54 52 ENDPROC(test_and_clear_bit) 53 + EXPORT_SYMBOL(test_and_clear_bit) 55 54 56 55 ENTRY(test_and_change_bit) /* %o0=nr, %o1=addr */ 57 56 BACKOFF_SETUP(%o3) ··· 74 71 nop 75 72 2: BACKOFF_SPIN(%o3, %o4, 1b) 76 73 ENDPROC(test_and_change_bit) 74 + EXPORT_SYMBOL(test_and_change_bit) 77 75 78 76 ENTRY(set_bit) /* %o0=nr, %o1=addr */ 79 77 BACKOFF_SETUP(%o3) ··· 94 90 nop 95 91 2: BACKOFF_SPIN(%o3, %o4, 1b) 96 92 ENDPROC(set_bit) 93 + EXPORT_SYMBOL(set_bit) 97 94 98 95 ENTRY(clear_bit) /* %o0=nr, %o1=addr */ 99 96 BACKOFF_SETUP(%o3) ··· 114 109 nop 115 110 2: BACKOFF_SPIN(%o3, %o4, 1b) 116 111 ENDPROC(clear_bit) 112 + EXPORT_SYMBOL(clear_bit) 117 113 118 114 ENTRY(change_bit) /* %o0=nr, %o1=addr */ 119 115 BACKOFF_SETUP(%o3) ··· 134 128 nop 135 129 2: BACKOFF_SPIN(%o3, %o4, 1b) 136 130 ENDPROC(change_bit) 131 + EXPORT_SYMBOL(change_bit)
+3
arch/sparc/lib/blockops.S
··· 6 6 7 7 #include <linux/linkage.h> 8 8 #include <asm/page.h> 9 + #include <asm/export.h> 9 10 10 11 /* Zero out 64 bytes of memory at (buf + offset). 11 12 * Assumes %g1 contains zero. ··· 65 64 retl 66 65 nop 67 66 ENDPROC(bzero_1page) 67 + EXPORT_SYMBOL(bzero_1page) 68 68 69 69 ENTRY(__copy_1page) 70 70 /* NOTE: If you change the number of insns of this routine, please check ··· 89 87 retl 90 88 nop 91 89 ENDPROC(__copy_1page) 90 + EXPORT_SYMBOL(__copy_1page)
+4
arch/sparc/lib/bzero.S
··· 5 5 */ 6 6 7 7 #include <linux/linkage.h> 8 + #include <asm/export.h> 8 9 9 10 .text 10 11 ··· 79 78 mov %o3, %o0 80 79 ENDPROC(__bzero) 81 80 ENDPROC(memset) 81 + EXPORT_SYMBOL(__bzero) 82 + EXPORT_SYMBOL(memset) 82 83 83 84 #define EX_ST(x,y) \ 84 85 98: x,y; \ ··· 146 143 retl 147 144 clr %o0 148 145 ENDPROC(__clear_user) 146 + EXPORT_SYMBOL(__clear_user)
+3
arch/sparc/lib/checksum_32.S
··· 14 14 */ 15 15 16 16 #include <asm/errno.h> 17 + #include <asm/export.h> 17 18 18 19 #define CSUM_BIGCHUNK(buf, offset, sum, t0, t1, t2, t3, t4, t5) \ 19 20 ldd [buf + offset + 0x00], t0; \ ··· 105 104 * buffer of size 0x20. Follow the code path for that case. 106 105 */ 107 106 .globl csum_partial 107 + EXPORT_SYMBOL(csum_partial) 108 108 csum_partial: /* %o0=buf, %o1=len, %o2=sum */ 109 109 andcc %o0, 0x7, %g0 ! alignment problems? 110 110 bne csum_partial_fix_alignment ! yep, handle it ··· 337 335 */ 338 336 .align 8 339 337 .globl __csum_partial_copy_sparc_generic 338 + EXPORT_SYMBOL(__csum_partial_copy_sparc_generic) 340 339 __csum_partial_copy_sparc_generic: 341 340 /* %o0=src, %o1=dest, %g1=len, %g7=sum */ 342 341 xor %o0, %o1, %o4 ! get changing bits
+2
arch/sparc/lib/checksum_64.S
··· 13 13 * BSD4.4 portable checksum routine 14 14 */ 15 15 16 + #include <asm/export.h> 16 17 .text 17 18 18 19 csum_partial_fix_alignment: ··· 38 37 39 38 .align 32 40 39 .globl csum_partial 40 + EXPORT_SYMBOL(csum_partial) 41 41 csum_partial: /* %o0=buff, %o1=len, %o2=sum */ 42 42 prefetch [%o0 + 0x000], #n_reads 43 43 clr %o4
+3
arch/sparc/lib/clear_page.S
··· 10 10 #include <asm/pgtable.h> 11 11 #include <asm/spitfire.h> 12 12 #include <asm/head.h> 13 + #include <asm/export.h> 13 14 14 15 /* What we used to do was lock a TLB entry into a specific 15 16 * TLB slot, clear the page with interrupts disabled, then ··· 27 26 .text 28 27 29 28 .globl _clear_page 29 + EXPORT_SYMBOL(_clear_page) 30 30 _clear_page: /* %o0=dest */ 31 31 ba,pt %xcc, clear_page_common 32 32 clr %o4 ··· 37 35 */ 38 36 .align 32 39 37 .globl clear_user_page 38 + EXPORT_SYMBOL(clear_user_page) 40 39 clear_user_page: /* %o0=dest, %o1=vaddr */ 41 40 lduw [%g6 + TI_PRE_COUNT], %o2 42 41 sethi %hi(PAGE_OFFSET), %g2
+2
arch/sparc/lib/copy_in_user.S
··· 5 5 6 6 #include <linux/linkage.h> 7 7 #include <asm/asi.h> 8 + #include <asm/export.h> 8 9 9 10 #define XCC xcc 10 11 ··· 91 90 retl 92 91 clr %o0 93 92 ENDPROC(___copy_in_user) 93 + EXPORT_SYMBOL(___copy_in_user)
+2
arch/sparc/lib/copy_page.S
··· 10 10 #include <asm/pgtable.h> 11 11 #include <asm/spitfire.h> 12 12 #include <asm/head.h> 13 + #include <asm/export.h> 13 14 14 15 /* What we used to do was lock a TLB entry into a specific 15 16 * TLB slot, clear the page with interrupts disabled, then ··· 45 44 .align 32 46 45 .globl copy_user_page 47 46 .type copy_user_page,#function 47 + EXPORT_SYMBOL(copy_user_page) 48 48 copy_user_page: /* %o0=dest, %o1=src, %o2=vaddr */ 49 49 lduw [%g6 + TI_PRE_COUNT], %o4 50 50 sethi %hi(PAGE_OFFSET), %g2
+2
arch/sparc/lib/copy_user.S
··· 15 15 #include <asm/asmmacro.h> 16 16 #include <asm/page.h> 17 17 #include <asm/thread_info.h> 18 + #include <asm/export.h> 18 19 19 20 /* Work around cpp -rob */ 20 21 #define ALLOC #alloc ··· 120 119 __copy_user_begin: 121 120 122 121 .globl __copy_user 122 + EXPORT_SYMBOL(__copy_user) 123 123 dword_align: 124 124 andcc %o1, 1, %g0 125 125 be 4f
+3
arch/sparc/lib/csum_copy.S
··· 3 3 * Copyright (C) 2005 David S. Miller <davem@davemloft.net> 4 4 */ 5 5 6 + #include <asm/export.h> 7 + 6 8 #ifdef __KERNEL__ 7 9 #define GLOBAL_SPARE %g7 8 10 #else ··· 65 63 add %o5, %o4, %o4 66 64 67 65 .globl FUNC_NAME 66 + EXPORT_SYMBOL(FUNC_NAME) 68 67 FUNC_NAME: /* %o0=src, %o1=dst, %o2=len, %o3=sum */ 69 68 LOAD(prefetch, %o0 + 0x000, #n_reads) 70 69 xor %o0, %o1, %g1
+2
arch/sparc/lib/divdi3.S
··· 17 17 the Free Software Foundation, 59 Temple Place - Suite 330, 18 18 Boston, MA 02111-1307, USA. */ 19 19 20 + #include <asm/export.h> 20 21 .text 21 22 .align 4 22 23 .globl __divdi3 ··· 280 279 .LL81: 281 280 ret 282 281 restore 282 + EXPORT_SYMBOL(__divdi3)
+3
arch/sparc/lib/ffs.S
··· 1 1 #include <linux/linkage.h> 2 + #include <asm/export.h> 2 3 3 4 .register %g2,#scratch 4 5 ··· 66 65 add %o2, %g1, %o0 67 66 ENDPROC(ffs) 68 67 ENDPROC(__ffs) 68 + EXPORT_SYMBOL(__ffs) 69 + EXPORT_SYMBOL(ffs) 69 70 70 71 .section .popc_6insn_patch, "ax" 71 72 .word ffs
+5
arch/sparc/lib/hweight.S
··· 1 1 #include <linux/linkage.h> 2 + #include <asm/export.h> 2 3 3 4 .text 4 5 .align 32 ··· 8 7 nop 9 8 nop 10 9 ENDPROC(__arch_hweight8) 10 + EXPORT_SYMBOL(__arch_hweight8) 11 11 .section .popc_3insn_patch, "ax" 12 12 .word __arch_hweight8 13 13 sllx %o0, 64-8, %g1 ··· 21 19 nop 22 20 nop 23 21 ENDPROC(__arch_hweight16) 22 + EXPORT_SYMBOL(__arch_hweight16) 24 23 .section .popc_3insn_patch, "ax" 25 24 .word __arch_hweight16 26 25 sllx %o0, 64-16, %g1 ··· 34 31 nop 35 32 nop 36 33 ENDPROC(__arch_hweight32) 34 + EXPORT_SYMBOL(__arch_hweight32) 37 35 .section .popc_3insn_patch, "ax" 38 36 .word __arch_hweight32 39 37 sllx %o0, 64-32, %g1 ··· 47 43 nop 48 44 nop 49 45 ENDPROC(__arch_hweight64) 46 + EXPORT_SYMBOL(__arch_hweight64) 50 47 .section .popc_3insn_patch, "ax" 51 48 .word __arch_hweight64 52 49 retl
+2
arch/sparc/lib/ipcsum.S
··· 1 1 #include <linux/linkage.h> 2 + #include <asm/export.h> 2 3 3 4 .text 4 5 ENTRY(ip_fast_csum) /* %o0 = iph, %o1 = ihl */ ··· 32 31 retl 33 32 and %o2, %o1, %o0 34 33 ENDPROC(ip_fast_csum) 34 + EXPORT_SYMBOL(ip_fast_csum)
-174
arch/sparc/lib/ksyms.c
··· 1 - /* 2 - * Export of symbols defined in assembler 3 - */ 4 - 5 - /* Tell string.h we don't want memcpy etc. as cpp defines */ 6 - #define EXPORT_SYMTAB_STROPS 7 - 8 - #include <linux/module.h> 9 - #include <linux/string.h> 10 - #include <linux/types.h> 11 - 12 - #include <asm/checksum.h> 13 - #include <asm/uaccess.h> 14 - #include <asm/ftrace.h> 15 - 16 - /* string functions */ 17 - EXPORT_SYMBOL(strlen); 18 - EXPORT_SYMBOL(strncmp); 19 - 20 - /* mem* functions */ 21 - extern void *__memscan_zero(void *, size_t); 22 - extern void *__memscan_generic(void *, int, size_t); 23 - extern void *__bzero(void *, size_t); 24 - 25 - EXPORT_SYMBOL(memscan); 26 - EXPORT_SYMBOL(__memscan_zero); 27 - EXPORT_SYMBOL(__memscan_generic); 28 - EXPORT_SYMBOL(memcmp); 29 - EXPORT_SYMBOL(memcpy); 30 - EXPORT_SYMBOL(memset); 31 - EXPORT_SYMBOL(memmove); 32 - EXPORT_SYMBOL(__bzero); 33 - 34 - /* Networking helper routines. */ 35 - EXPORT_SYMBOL(csum_partial); 36 - 37 - #ifdef CONFIG_MCOUNT 38 - EXPORT_SYMBOL(_mcount); 39 - #endif 40 - 41 - /* 42 - * sparc 43 - */ 44 - #ifdef CONFIG_SPARC32 45 - extern int __ashrdi3(int, int); 46 - extern int __ashldi3(int, int); 47 - extern int __lshrdi3(int, int); 48 - extern int __muldi3(int, int); 49 - extern int __divdi3(int, int); 50 - 51 - extern void (*__copy_1page)(void *, const void *); 52 - extern void (*bzero_1page)(void *); 53 - 54 - extern void ___rw_read_enter(void); 55 - extern void ___rw_read_try(void); 56 - extern void ___rw_read_exit(void); 57 - extern void ___rw_write_enter(void); 58 - 59 - /* Networking helper routines. */ 60 - EXPORT_SYMBOL(__csum_partial_copy_sparc_generic); 61 - 62 - /* Special internal versions of library functions. */ 63 - EXPORT_SYMBOL(__copy_1page); 64 - EXPORT_SYMBOL(__memmove); 65 - EXPORT_SYMBOL(bzero_1page); 66 - 67 - /* Moving data to/from/in userspace. 
*/ 68 - EXPORT_SYMBOL(__copy_user); 69 - 70 - /* Used by asm/spinlock.h */ 71 - #ifdef CONFIG_SMP 72 - EXPORT_SYMBOL(___rw_read_enter); 73 - EXPORT_SYMBOL(___rw_read_try); 74 - EXPORT_SYMBOL(___rw_read_exit); 75 - EXPORT_SYMBOL(___rw_write_enter); 76 - #endif 77 - 78 - EXPORT_SYMBOL(__ashrdi3); 79 - EXPORT_SYMBOL(__ashldi3); 80 - EXPORT_SYMBOL(__lshrdi3); 81 - EXPORT_SYMBOL(__muldi3); 82 - EXPORT_SYMBOL(__divdi3); 83 - #endif 84 - 85 - /* 86 - * sparc64 87 - */ 88 - #ifdef CONFIG_SPARC64 89 - /* Networking helper routines. */ 90 - EXPORT_SYMBOL(csum_partial_copy_nocheck); 91 - EXPORT_SYMBOL(__csum_partial_copy_from_user); 92 - EXPORT_SYMBOL(__csum_partial_copy_to_user); 93 - EXPORT_SYMBOL(ip_fast_csum); 94 - 95 - /* Moving data to/from/in userspace. */ 96 - EXPORT_SYMBOL(___copy_to_user); 97 - EXPORT_SYMBOL(___copy_from_user); 98 - EXPORT_SYMBOL(___copy_in_user); 99 - EXPORT_SYMBOL(__clear_user); 100 - 101 - /* Atomic counter implementation. */ 102 - #define ATOMIC_OP(op) \ 103 - EXPORT_SYMBOL(atomic_##op); \ 104 - EXPORT_SYMBOL(atomic64_##op); 105 - 106 - #define ATOMIC_OP_RETURN(op) \ 107 - EXPORT_SYMBOL(atomic_##op##_return); \ 108 - EXPORT_SYMBOL(atomic64_##op##_return); 109 - 110 - #define ATOMIC_FETCH_OP(op) \ 111 - EXPORT_SYMBOL(atomic_fetch_##op); \ 112 - EXPORT_SYMBOL(atomic64_fetch_##op); 113 - 114 - #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op) 115 - 116 - ATOMIC_OPS(add) 117 - ATOMIC_OPS(sub) 118 - 119 - #undef ATOMIC_OPS 120 - #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op) 121 - 122 - ATOMIC_OPS(and) 123 - ATOMIC_OPS(or) 124 - ATOMIC_OPS(xor) 125 - 126 - #undef ATOMIC_OPS 127 - #undef ATOMIC_FETCH_OP 128 - #undef ATOMIC_OP_RETURN 129 - #undef ATOMIC_OP 130 - 131 - EXPORT_SYMBOL(atomic64_dec_if_positive); 132 - 133 - /* Atomic bit operations. 
*/ 134 - EXPORT_SYMBOL(test_and_set_bit); 135 - EXPORT_SYMBOL(test_and_clear_bit); 136 - EXPORT_SYMBOL(test_and_change_bit); 137 - EXPORT_SYMBOL(set_bit); 138 - EXPORT_SYMBOL(clear_bit); 139 - EXPORT_SYMBOL(change_bit); 140 - 141 - /* Special internal versions of library functions. */ 142 - EXPORT_SYMBOL(_clear_page); 143 - EXPORT_SYMBOL(clear_user_page); 144 - EXPORT_SYMBOL(copy_user_page); 145 - 146 - /* RAID code needs this */ 147 - void VISenter(void); 148 - EXPORT_SYMBOL(VISenter); 149 - 150 - extern void xor_vis_2(unsigned long, unsigned long *, unsigned long *); 151 - extern void xor_vis_3(unsigned long, unsigned long *, unsigned long *, 152 - unsigned long *); 153 - extern void xor_vis_4(unsigned long, unsigned long *, unsigned long *, 154 - unsigned long *, unsigned long *); 155 - extern void xor_vis_5(unsigned long, unsigned long *, unsigned long *, 156 - unsigned long *, unsigned long *, unsigned long *); 157 - EXPORT_SYMBOL(xor_vis_2); 158 - EXPORT_SYMBOL(xor_vis_3); 159 - EXPORT_SYMBOL(xor_vis_4); 160 - EXPORT_SYMBOL(xor_vis_5); 161 - 162 - extern void xor_niagara_2(unsigned long, unsigned long *, unsigned long *); 163 - extern void xor_niagara_3(unsigned long, unsigned long *, unsigned long *, 164 - unsigned long *); 165 - extern void xor_niagara_4(unsigned long, unsigned long *, unsigned long *, 166 - unsigned long *, unsigned long *); 167 - extern void xor_niagara_5(unsigned long, unsigned long *, unsigned long *, 168 - unsigned long *, unsigned long *, unsigned long *); 169 - 170 - EXPORT_SYMBOL(xor_niagara_2); 171 - EXPORT_SYMBOL(xor_niagara_3); 172 - EXPORT_SYMBOL(xor_niagara_4); 173 - EXPORT_SYMBOL(xor_niagara_5); 174 - #endif
+5
arch/sparc/lib/locks.S
··· 10 10 #include <asm/psr.h> 11 11 #include <asm/smp.h> 12 12 #include <asm/spinlock.h> 13 + #include <asm/export.h> 13 14 14 15 .text 15 16 .align 4 ··· 49 48 ld [%g1], %g2 50 49 51 50 .globl ___rw_read_enter 51 + EXPORT_SYMBOL(___rw_read_enter) 52 52 ___rw_read_enter: 53 53 orcc %g2, 0x0, %g0 54 54 bne,a ___rw_read_enter_spin_on_wlock ··· 61 59 mov %g4, %o7 62 60 63 61 .globl ___rw_read_exit 62 + EXPORT_SYMBOL(___rw_read_exit) 64 63 ___rw_read_exit: 65 64 orcc %g2, 0x0, %g0 66 65 bne,a ___rw_read_exit_spin_on_wlock ··· 73 70 mov %g4, %o7 74 71 75 72 .globl ___rw_read_try 73 + EXPORT_SYMBOL(___rw_read_try) 76 74 ___rw_read_try: 77 75 orcc %g2, 0x0, %g0 78 76 bne ___rw_read_try_spin_on_wlock ··· 85 81 mov %g4, %o7 86 82 87 83 .globl ___rw_write_enter 84 + EXPORT_SYMBOL(___rw_write_enter) 88 85 ___rw_write_enter: 89 86 orcc %g2, 0x0, %g0 90 87 bne ___rw_write_enter_spin_on_wlock
+2
arch/sparc/lib/lshrdi3.S
··· 1 1 #include <linux/linkage.h> 2 + #include <asm/export.h> 2 3 3 4 ENTRY(__lshrdi3) 4 5 cmp %o2, 0 ··· 26 25 retl 27 26 nop 28 27 ENDPROC(__lshrdi3) 28 + EXPORT_SYMBOL(__lshrdi3)
+2
arch/sparc/lib/mcount.S
··· 6 6 */ 7 7 8 8 #include <linux/linkage.h> 9 + #include <asm/export.h> 9 10 10 11 /* 11 12 * This is the main variant and is called by C code. GCC's -pg option ··· 17 16 .align 32 18 17 .globl _mcount 19 18 .type _mcount,#function 19 + EXPORT_SYMBOL(_mcount) 20 20 .globl mcount 21 21 .type mcount,#function 22 22 _mcount:
+2
arch/sparc/lib/memcmp.S
··· 6 6 7 7 #include <linux/linkage.h> 8 8 #include <asm/asm.h> 9 + #include <asm/export.h> 9 10 10 11 .text 11 12 ENTRY(memcmp) ··· 26 25 2: retl 27 26 mov 0, %o0 28 27 ENDPROC(memcmp) 28 + EXPORT_SYMBOL(memcmp)
+4
arch/sparc/lib/memcpy.S
··· 7 7 * Copyright (C) 1996 Jakub Jelinek (jj@sunsite.mff.cuni.cz) 8 8 */ 9 9 10 + #include <asm/export.h> 10 11 #define FUNC(x) \ 11 12 .globl x; \ 12 13 .type x,@function; \ ··· 144 143 #ifdef __KERNEL__ 145 144 FUNC(amemmove) 146 145 FUNC(__memmove) 146 + EXPORT_SYMBOL(__memmove) 147 147 #endif 148 148 FUNC(memmove) 149 + EXPORT_SYMBOL(memmove) 149 150 cmp %o0, %o1 150 151 mov %o0, %g7 151 152 bleu 9f ··· 205 202 add %o0, 2, %o0 206 203 207 204 FUNC(memcpy) /* %o0=dst %o1=src %o2=len */ 205 + EXPORT_SYMBOL(memcpy) 208 206 209 207 sub %o0, %o1, %o4 210 208 mov %o0, %g7
+2
arch/sparc/lib/memmove.S
··· 5 5 */ 6 6 7 7 #include <linux/linkage.h> 8 + #include <asm/export.h> 8 9 9 10 .text 10 11 ENTRY(memmove) /* o0=dst o1=src o2=len */ ··· 58 57 stb %g7, [%o0 - 0x1] 59 58 ba,a,pt %xcc, 99b 60 59 ENDPROC(memmove) 60 + EXPORT_SYMBOL(memmove)
+4
arch/sparc/lib/memscan_32.S
··· 4 4 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu) 5 5 */ 6 6 7 + #include <asm/export.h> 8 + 7 9 /* In essence, this is just a fancy strlen. */ 8 10 9 11 #define LO_MAGIC 0x01010101 ··· 15 13 .align 4 16 14 .globl __memscan_zero, __memscan_generic 17 15 .globl memscan 16 + EXPORT_SYMBOL(__memscan_zero) 17 + EXPORT_SYMBOL(__memscan_generic) 18 18 __memscan_zero: 19 19 /* %o0 = addr, %o1 = size */ 20 20 cmp %o1, 0
+4
arch/sparc/lib/memscan_64.S
··· 5 5 * Copyright (C) 1998 David S. Miller (davem@redhat.com) 6 6 */ 7 7 8 + #include <asm/export.h> 9 + 8 10 #define HI_MAGIC 0x8080808080808080 9 11 #define LO_MAGIC 0x0101010101010101 10 12 #define ASI_PL 0x88 ··· 15 13 .align 32 16 14 .globl __memscan_zero, __memscan_generic 17 15 .globl memscan 16 + EXPORT_SYMBOL(__memscan_zero) 17 + EXPORT_SYMBOL(__memscan_generic) 18 18 19 19 __memscan_zero: 20 20 /* %o0 = bufp, %o1 = size */
+3
arch/sparc/lib/memset.S
··· 9 9 */ 10 10 11 11 #include <asm/ptrace.h> 12 + #include <asm/export.h> 12 13 13 14 /* Work around cpp -rob */ 14 15 #define ALLOC #alloc ··· 64 63 65 64 .globl __bzero 66 65 .globl memset 66 + EXPORT_SYMBOL(__bzero) 67 + EXPORT_SYMBOL(memset) 67 68 .globl __memset_start, __memset_end 68 69 __memset_start: 69 70 memset:
+2
arch/sparc/lib/muldi3.S
··· 17 17 the Free Software Foundation, 59 Temple Place - Suite 330, 18 18 Boston, MA 02111-1307, USA. */ 19 19 20 + #include <asm/export.h> 20 21 .text 21 22 .align 4 22 23 .globl __muldi3 ··· 75 74 add %l2, %l0, %i0 76 75 ret 77 76 restore %g0, %l3, %o1 77 + EXPORT_SYMBOL(__muldi3)
+2
arch/sparc/lib/strlen.S
··· 7 7 8 8 #include <linux/linkage.h> 9 9 #include <asm/asm.h> 10 + #include <asm/export.h> 10 11 11 12 #define LO_MAGIC 0x01010101 12 13 #define HI_MAGIC 0x80808080 ··· 79 78 retl 80 79 mov 2, %o0 81 80 ENDPROC(strlen) 81 + EXPORT_SYMBOL(strlen)
+2
arch/sparc/lib/strncmp_32.S
··· 4 4 */ 5 5 6 6 #include <linux/linkage.h> 7 + #include <asm/export.h> 7 8 8 9 .text 9 10 ENTRY(strncmp) ··· 117 116 retl 118 117 sub %o3, %o0, %o0 119 118 ENDPROC(strncmp) 119 + EXPORT_SYMBOL(strncmp)
+2
arch/sparc/lib/strncmp_64.S
··· 6 6 7 7 #include <linux/linkage.h> 8 8 #include <asm/asi.h> 9 + #include <asm/export.h> 9 10 10 11 .text 11 12 ENTRY(strncmp) ··· 29 28 retl 30 29 clr %o0 31 30 ENDPROC(strncmp) 31 + EXPORT_SYMBOL(strncmp)
+9
arch/sparc/lib/xor.S
··· 13 13 #include <asm/asi.h> 14 14 #include <asm/dcu.h> 15 15 #include <asm/spitfire.h> 16 + #include <asm/export.h> 16 17 17 18 /* 18 19 * Requirements: ··· 91 90 retl 92 91 wr %g0, 0, %fprs 93 92 ENDPROC(xor_vis_2) 93 + EXPORT_SYMBOL(xor_vis_2) 94 94 95 95 ENTRY(xor_vis_3) 96 96 rd %fprs, %o5 ··· 158 156 retl 159 157 wr %g0, 0, %fprs 160 158 ENDPROC(xor_vis_3) 159 + EXPORT_SYMBOL(xor_vis_3) 161 160 162 161 ENTRY(xor_vis_4) 163 162 rd %fprs, %o5 ··· 244 241 retl 245 242 wr %g0, 0, %fprs 246 243 ENDPROC(xor_vis_4) 244 + EXPORT_SYMBOL(xor_vis_4) 247 245 248 246 ENTRY(xor_vis_5) 249 247 save %sp, -192, %sp ··· 351 347 ret 352 348 restore 353 349 ENDPROC(xor_vis_5) 350 + EXPORT_SYMBOL(xor_vis_5) 354 351 355 352 /* Niagara versions. */ 356 353 ENTRY(xor_niagara_2) /* %o0=bytes, %o1=dest, %o2=src */ ··· 398 393 ret 399 394 restore 400 395 ENDPROC(xor_niagara_2) 396 + EXPORT_SYMBOL(xor_niagara_2) 401 397 402 398 ENTRY(xor_niagara_3) /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2 */ 403 399 save %sp, -192, %sp ··· 460 454 ret 461 455 restore 462 456 ENDPROC(xor_niagara_3) 457 + EXPORT_SYMBOL(xor_niagara_3) 463 458 464 459 ENTRY(xor_niagara_4) /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3 */ 465 460 save %sp, -192, %sp ··· 543 536 ret 544 537 restore 545 538 ENDPROC(xor_niagara_4) 539 + EXPORT_SYMBOL(xor_niagara_4) 546 540 547 541 ENTRY(xor_niagara_5) /* %o0=bytes, %o1=dest, %o2=src1, %o3=src2, %o4=src3, %o5=src4 */ 548 542 save %sp, -192, %sp ··· 642 634 ret 643 635 restore 644 636 ENDPROC(xor_niagara_5) 637 + EXPORT_SYMBOL(xor_niagara_5)