ppc: move exports to definitions

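EXPORT_SYMBOL() for the routines under arch/powerpc/lib currently lives
in a separate translation unit, ppc_ksyms.c.  Move each export next to
the definition it belongs to: the assembly sources include
<asm/export.h>, which makes EXPORT_SYMBOL() usable from .S files, and
the export is placed alongside the routine it exports.  With every
export moved, ppc_ksyms.c is removed together with its entry in the
Makefile.

An illustrative sketch of the resulting pattern (routine body elided
here; see the memcpy_64.S hunk below for the actual change):

	#include <asm/export.h>

	_GLOBAL_TOC(memcpy)
		/* ... copy loop ... */
		blr
	EXPORT_SYMBOL(memcpy)
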
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
diff --git a/arch/powerpc/lib/Makefile b/arch/powerpc/lib/Makefile
index ba21be1..555ff1a 100644
--- a/arch/powerpc/lib/Makefile
+++ b/arch/powerpc/lib/Makefile
@@ -9,7 +9,7 @@
 CFLAGS_REMOVE_code-patching.o = $(CC_FLAGS_FTRACE)
 CFLAGS_REMOVE_feature-fixups.o = $(CC_FLAGS_FTRACE)
 
-obj-y += string.o alloc.o crtsavres.o ppc_ksyms.o code-patching.o \
+obj-y += string.o alloc.o crtsavres.o code-patching.o \
 	 feature-fixups.o
 
 obj-$(CONFIG_PPC32)	+= div64.o copy_32.o
diff --git a/arch/powerpc/lib/checksum_32.S b/arch/powerpc/lib/checksum_32.S
index d90870a..da678bd 100644
--- a/arch/powerpc/lib/checksum_32.S
+++ b/arch/powerpc/lib/checksum_32.S
@@ -17,6 +17,7 @@
 #include <asm/cache.h>
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 	.text
 
@@ -68,6 +69,7 @@
 	adde	r5,r5,r0
 5:	addze	r3,r5		/* add in final carry */
 	blr
+EXPORT_SYMBOL(__csum_partial)
 
 /*
  * Computes the checksum of a memory block at src, length len,
@@ -295,3 +297,4 @@
 	.long	41b,dst_error
 	.long	50b,src_error
 	.long	51b,dst_error
+EXPORT_SYMBOL(csum_partial_copy_generic)
diff --git a/arch/powerpc/lib/checksum_64.S b/arch/powerpc/lib/checksum_64.S
index fdec6e6..fd91766 100644
--- a/arch/powerpc/lib/checksum_64.S
+++ b/arch/powerpc/lib/checksum_64.S
@@ -16,6 +16,7 @@
 #include <asm/processor.h>
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 /*
  * Computes the checksum of a memory block at buff, length len,
@@ -176,6 +177,7 @@
 	add	r3,r4,r0
 	srdi	r3,r3,32
 	blr
+EXPORT_SYMBOL(__csum_partial)
 
 
 	.macro srcnr
@@ -430,3 +432,4 @@
 	li	r6,-EFAULT
 	stw	r6,0(r8)
 	blr
+EXPORT_SYMBOL(csum_partial_copy_generic)
diff --git a/arch/powerpc/lib/copy_32.S b/arch/powerpc/lib/copy_32.S
index 99f37f2..40cce33 100644
--- a/arch/powerpc/lib/copy_32.S
+++ b/arch/powerpc/lib/copy_32.S
@@ -12,6 +12,7 @@
 #include <asm/cache.h>
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 #define COPY_16_BYTES		\
 	lwz	r7,4(r4);	\
@@ -92,6 +93,7 @@
 	subf	r6,r0,r6
 	cmplwi	0,r4,0
 	bne	2f	/* Use normal procedure if r4 is not zero */
+EXPORT_SYMBOL(memset)
 _GLOBAL(memset_nocache_branch)
 	b	2f	/* Skip optimised bloc until cache is enabled */
 
@@ -216,6 +218,8 @@
 	stbu	r0,1(r6)
 	bdnz	40b
 65:	blr
+EXPORT_SYMBOL(memcpy)
+EXPORT_SYMBOL(memmove)
 
 generic_memcpy:
 	srwi.	r7,r5,3
@@ -507,3 +511,4 @@
 	.long	112b,120b
 	.long	114b,120b
 	.text
+EXPORT_SYMBOL(__copy_tofrom_user)
diff --git a/arch/powerpc/lib/copypage_64.S b/arch/powerpc/lib/copypage_64.S
index a3c4dc4..21367b3 100644
--- a/arch/powerpc/lib/copypage_64.S
+++ b/arch/powerpc/lib/copypage_64.S
@@ -10,6 +10,7 @@
 #include <asm/processor.h>
 #include <asm/ppc_asm.h>
 #include <asm/asm-offsets.h>
+#include <asm/export.h>
 
         .section        ".toc","aw"
 PPC64_CACHES:
@@ -110,3 +111,4 @@
 	std	r11,120(r3)
 	std	r12,128(r3)
 	blr
+EXPORT_SYMBOL(copy_page)
diff --git a/arch/powerpc/lib/copyuser_64.S b/arch/powerpc/lib/copyuser_64.S
index f09899e..f19a15b 100644
--- a/arch/powerpc/lib/copyuser_64.S
+++ b/arch/powerpc/lib/copyuser_64.S
@@ -8,6 +8,7 @@
  */
 #include <asm/processor.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 #ifdef __BIG_ENDIAN__
 #define sLd sld		/* Shift towards low-numbered address. */
@@ -671,3 +672,4 @@
 	.llong	89b,100b
 	.llong	90b,100b
 	.llong	91b,100b
+EXPORT_SYMBOL(__copy_tofrom_user)
diff --git a/arch/powerpc/lib/hweight_64.S b/arch/powerpc/lib/hweight_64.S
index 19e6600..3de7ac1 100644
--- a/arch/powerpc/lib/hweight_64.S
+++ b/arch/powerpc/lib/hweight_64.S
@@ -19,6 +19,7 @@
  */
 #include <asm/processor.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 /* Note: This code relies on -mminimal-toc */
 
@@ -32,6 +33,7 @@
 	clrldi	r3,r3,64-8
 	blr
 ALT_FTR_SECTION_END_IFCLR(CPU_FTR_POPCNTB)
+EXPORT_SYMBOL(__arch_hweight8)
 
 _GLOBAL(__arch_hweight16)
 BEGIN_FTR_SECTION
@@ -54,6 +56,7 @@
 	blr
   ALT_FTR_SECTION_END_NESTED_IFCLR(CPU_FTR_POPCNTD, 50)
 ALT_FTR_SECTION_END_IFCLR(CPU_FTR_POPCNTB)
+EXPORT_SYMBOL(__arch_hweight16)
 
 _GLOBAL(__arch_hweight32)
 BEGIN_FTR_SECTION
@@ -79,6 +82,7 @@
 	blr
   ALT_FTR_SECTION_END_NESTED_IFCLR(CPU_FTR_POPCNTD, 51)
 ALT_FTR_SECTION_END_IFCLR(CPU_FTR_POPCNTB)
+EXPORT_SYMBOL(__arch_hweight32)
 
 _GLOBAL(__arch_hweight64)
 BEGIN_FTR_SECTION
@@ -108,3 +112,4 @@
 	blr
   ALT_FTR_SECTION_END_NESTED_IFCLR(CPU_FTR_POPCNTD, 52)
 ALT_FTR_SECTION_END_IFCLR(CPU_FTR_POPCNTB)
+EXPORT_SYMBOL(__arch_hweight64)
diff --git a/arch/powerpc/lib/mem_64.S b/arch/powerpc/lib/mem_64.S
index 43435c6..2d128df 100644
--- a/arch/powerpc/lib/mem_64.S
+++ b/arch/powerpc/lib/mem_64.S
@@ -11,6 +11,7 @@
 #include <asm/processor.h>
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 _GLOBAL(memset)
 	neg	r0,r3
@@ -76,6 +77,7 @@
 10:	bflr	31
 	stb	r4,0(r6)
 	blr
+EXPORT_SYMBOL(memset)
 
 _GLOBAL_TOC(memmove)
 	cmplw	0,r3,r4
@@ -117,3 +119,4 @@
 	beq	2b
 	mtctr	r7
 	b	1b
+EXPORT_SYMBOL(memmove)
diff --git a/arch/powerpc/lib/memcmp_64.S b/arch/powerpc/lib/memcmp_64.S
index 8953d23..d75d18b 100644
--- a/arch/powerpc/lib/memcmp_64.S
+++ b/arch/powerpc/lib/memcmp_64.S
@@ -8,6 +8,7 @@
  * 2 of the License, or (at your option) any later version.
  */
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 #define off8	r6
 #define off16	r7
@@ -231,3 +232,4 @@
 	ld	r28,-32(r1)
 	ld	r27,-40(r1)
 	blr
+EXPORT_SYMBOL(memcmp)
diff --git a/arch/powerpc/lib/memcpy_64.S b/arch/powerpc/lib/memcpy_64.S
index 32a06ec..f4d6088 100644
--- a/arch/powerpc/lib/memcpy_64.S
+++ b/arch/powerpc/lib/memcpy_64.S
@@ -8,6 +8,7 @@
  */
 #include <asm/processor.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 	.align	7
 _GLOBAL_TOC(memcpy)
@@ -219,3 +220,4 @@
 4:	ld	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)	/* return dest pointer */
 	blr
 #endif
+EXPORT_SYMBOL(memcpy)
diff --git a/arch/powerpc/lib/ppc_ksyms.c b/arch/powerpc/lib/ppc_ksyms.c
deleted file mode 100644
index ae69d84..0000000
--- a/arch/powerpc/lib/ppc_ksyms.c
+++ /dev/null
@@ -1,29 +0,0 @@
-#include <linux/string.h>
-#include <linux/uaccess.h>
-#include <linux/bitops.h>
-#include <net/checksum.h>
-
-EXPORT_SYMBOL(memcpy);
-EXPORT_SYMBOL(memset);
-EXPORT_SYMBOL(memmove);
-EXPORT_SYMBOL(memcmp);
-EXPORT_SYMBOL(memchr);
-
-EXPORT_SYMBOL(strncpy);
-EXPORT_SYMBOL(strncmp);
-
-#ifndef CONFIG_GENERIC_CSUM
-EXPORT_SYMBOL(__csum_partial);
-EXPORT_SYMBOL(csum_partial_copy_generic);
-#endif
-
-EXPORT_SYMBOL(__copy_tofrom_user);
-EXPORT_SYMBOL(__clear_user);
-EXPORT_SYMBOL(copy_page);
-
-#ifdef CONFIG_PPC64
-EXPORT_SYMBOL(__arch_hweight8);
-EXPORT_SYMBOL(__arch_hweight16);
-EXPORT_SYMBOL(__arch_hweight32);
-EXPORT_SYMBOL(__arch_hweight64);
-#endif
diff --git a/arch/powerpc/lib/string.S b/arch/powerpc/lib/string.S
index beabc68..d13e076 100644
--- a/arch/powerpc/lib/string.S
+++ b/arch/powerpc/lib/string.S
@@ -11,6 +11,7 @@
 #include <asm/processor.h>
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
+#include <asm/export.h>
 
 	.section __ex_table,"a"
 	PPC_LONG_ALIGN
@@ -36,6 +37,7 @@
 2:	stbu	r0,1(r6)	/* clear it out if so */
 	bdnz	2b
 	blr
+EXPORT_SYMBOL(strncpy)
 
 _GLOBAL(strncmp)
 	PPC_LCMPI 0,r5,0
@@ -53,6 +55,7 @@
 	blr
 2:	li	r3,0
 	blr
+EXPORT_SYMBOL(strncmp)
 
 #ifdef CONFIG_PPC32
 _GLOBAL(memcmp)
@@ -68,6 +71,7 @@
 	blr
 2:	li	r3,0
 	blr
+EXPORT_SYMBOL(memcmp)
 #endif
 
 _GLOBAL(memchr)
@@ -82,6 +86,7 @@
 	beqlr
 2:	li	r3,0
 	blr
+EXPORT_SYMBOL(memchr)
 
 #ifdef CONFIG_PPC32
 _GLOBAL(__clear_user)
@@ -125,4 +130,5 @@
 	PPC_LONG	1b,91b
 	PPC_LONG	8b,92b
 	.text
+EXPORT_SYMBOL(__clear_user)
 #endif
diff --git a/arch/powerpc/lib/string_64.S b/arch/powerpc/lib/string_64.S
index 7bd9549..57ace35 100644
--- a/arch/powerpc/lib/string_64.S
+++ b/arch/powerpc/lib/string_64.S
@@ -20,6 +20,7 @@
 
 #include <asm/ppc_asm.h>
 #include <asm/asm-offsets.h>
+#include <asm/export.h>
 
 	.section	".toc","aw"
 PPC64_CACHES:
@@ -200,3 +201,4 @@
 	cmpdi	r4,32
 	blt	.Lshort_clear
 	b	.Lmedium_clear
+EXPORT_SYMBOL(__clear_user)