crypto: serpent-sse2/avx - allow both to be built into kernel

Rename the serpent-avx assembler functions so that they do not collide
with the serpent-sse2 assembler functions when both versions are linked
into the same kernel image.

Reported-by: Randy Dunlap <rdunlap@xenotime.net>
Cc: Johannes Goetzfried <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
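
For illustration, a minimal sketch of the configuration this rename makes
possible (assuming the existing Kconfig symbols for the two x86-64 glue
modules), with both implementations built into the same vmlinux:

	CONFIG_CRYPTO_SERPENT_SSE2_X86_64=y
	CONFIG_CRYPTO_SERPENT_AVX_X86_64=y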
diff --git a/arch/x86/crypto/serpent-avx-x86_64-asm_64.S b/arch/x86/crypto/serpent-avx-x86_64-asm_64.S
index 0ed47a1..504106b 100644
--- a/arch/x86/crypto/serpent-avx-x86_64-asm_64.S
+++ b/arch/x86/crypto/serpent-avx-x86_64-asm_64.S
@@ -579,10 +579,10 @@
 	vmovdqu x3,		(3*4*4)(out);
 
 .align 8
-.global __serpent_enc_blk_8way
-.type   __serpent_enc_blk_8way,@function;
+.global __serpent_enc_blk_8way_avx
+.type   __serpent_enc_blk_8way_avx,@function;
 
-__serpent_enc_blk_8way:
+__serpent_enc_blk_8way_avx:
 	/* input:
 	 *	%rdi: ctx, CTX
 	 *	%rsi: dst
@@ -647,10 +647,10 @@
 	ret;
 
 .align 8
-.global serpent_dec_blk_8way
-.type   serpent_dec_blk_8way,@function;
+.global serpent_dec_blk_8way_avx
+.type   serpent_dec_blk_8way_avx,@function;
 
-serpent_dec_blk_8way:
+serpent_dec_blk_8way_avx:
 	/* input:
 	 *	%rdi: ctx, CTX
 	 *	%rsi: dst
diff --git a/arch/x86/crypto/serpent_avx_glue.c b/arch/x86/crypto/serpent_avx_glue.c
index 0dc7a26..dd81bab 100644
--- a/arch/x86/crypto/serpent_avx_glue.c
+++ b/arch/x86/crypto/serpent_avx_glue.c
@@ -39,7 +39,7 @@
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
-#include <asm/serpent.h>
+#include <asm/serpent-avx.h>
 #include <crypto/scatterwalk.h>
 #include <linux/workqueue.h>
 #include <linux/spinlock.h>
diff --git a/arch/x86/crypto/serpent_sse2_glue.c b/arch/x86/crypto/serpent_sse2_glue.c
index 4b21be8..deecd25 100644
--- a/arch/x86/crypto/serpent_sse2_glue.c
+++ b/arch/x86/crypto/serpent_sse2_glue.c
@@ -42,7 +42,7 @@
 #include <crypto/lrw.h>
 #include <crypto/xts.h>
 #include <asm/i387.h>
-#include <asm/serpent.h>
+#include <asm/serpent-sse2.h>
 #include <crypto/scatterwalk.h>
 #include <linux/workqueue.h>
 #include <linux/spinlock.h>
diff --git a/arch/x86/include/asm/serpent-avx.h b/arch/x86/include/asm/serpent-avx.h
new file mode 100644
index 0000000..432deed
--- /dev/null
+++ b/arch/x86/include/asm/serpent-avx.h
@@ -0,0 +1,32 @@
+#ifndef ASM_X86_SERPENT_AVX_H
+#define ASM_X86_SERPENT_AVX_H
+
+#include <linux/crypto.h>
+#include <crypto/serpent.h>
+
+#define SERPENT_PARALLEL_BLOCKS 8
+
+asmlinkage void __serpent_enc_blk_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+					   const u8 *src, bool xor);
+asmlinkage void serpent_dec_blk_8way_avx(struct serpent_ctx *ctx, u8 *dst,
+					 const u8 *src);
+
+static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
+				   const u8 *src)
+{
+	__serpent_enc_blk_8way_avx(ctx, dst, src, false);
+}
+
+static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
+				       const u8 *src)
+{
+	__serpent_enc_blk_8way_avx(ctx, dst, src, true);
+}
+
+static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
+				   const u8 *src)
+{
+	serpent_dec_blk_8way_avx(ctx, dst, src);
+}
+
+#endif
diff --git a/arch/x86/include/asm/serpent.h b/arch/x86/include/asm/serpent-sse2.h
similarity index 95%
rename from arch/x86/include/asm/serpent.h
rename to arch/x86/include/asm/serpent-sse2.h
index d3ef63f..e6e77df 100644
--- a/arch/x86/include/asm/serpent.h
+++ b/arch/x86/include/asm/serpent-sse2.h
@@ -1,5 +1,5 @@
-#ifndef ASM_X86_SERPENT_H
-#define ASM_X86_SERPENT_H
+#ifndef ASM_X86_SERPENT_SSE2_H
+#define ASM_X86_SERPENT_SSE2_H
 
 #include <linux/crypto.h>
 #include <crypto/serpent.h>