crypto: aesni-intel - Fix build error on x86-32

Exclude the AES-GCM code from x86-32 builds, as it makes heavy use of
64-bit registers, which are not available on x86-32.
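
As a standalone illustration (not part of the patch itself), the demo
below is a hypothetical userspace program showing the compile-time guard
the patch relies on: __x86_64__ is predefined by the compiler when
targeting x86-64, so on a 32-bit target the guarded block is dropped by
the preprocessor and never reaches the assembler.

  #include <stdio.h>

  int main(void)
  {
  /* Hypothetical demo, not kernel code: the same define the .S file
   * uses selects the 64-bit-only path at compile time. */
  #ifdef __x86_64__
      printf("64-bit build: code using %%r8..%%r15 is compiled in\n");
  #else
      printf("32-bit build: 64-bit-only code is excluded entirely\n");
  #endif
      return 0;
  }

Note that the assembler file keys off __x86_64__ (a compiler-provided
define), while the C glue code keys off CONFIG_X86_64 (a Kconfig
symbol); for x86 kernel builds the two agree.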

While at it, fix the unregister order in aesni_exit() so that
algorithms are torn down in the reverse order of their registration.
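
For context, here is a hypothetical sketch of the goto-unwind idiom the
registration path follows; register_thing(), unregister_thing(),
demo_init() and demo_exit() are illustrative stand-ins for the
crypto_register_alg()/crypto_unregister_alg() calls in the driver, not
kernel APIs. Error paths unwind in reverse registration order, and the
exit routine mirrors init the same way:

  #include <stdio.h>

  /* Illustrative stand-ins for crypto_{register,unregister}_alg(). */
  static int register_thing(const char *name)
  {
      printf("register %s\n", name);
      return 0;
  }

  static void unregister_thing(const char *name)
  {
      printf("unregister %s\n", name);
  }

  static int demo_init(void)
  {
      int err;

      if ((err = register_thing("first")))
          goto first_err;
      if ((err = register_thing("second")))
          goto second_err;
      return 0;

  second_err:
      /* "second" failed, so only "first" needs to be undone. */
      unregister_thing("first");
  first_err:
      return err;
  }

  static void demo_exit(void)
  {
      /* Mirror image of demo_init(): last registered, first removed. */
      unregister_thing("second");
      unregister_thing("first");
  }

  int main(void)
  {
      if (!demo_init())
          demo_exit();
      return 0;
  }

The patch applies exactly this discipline: the rfc4106 algorithms are
now registered before the optional CTR/PCBC/XTS ones, and both the
error labels in aesni_init() and the body of aesni_exit() unregister
them at the matching point in the reverse sequence.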

Signed-off-by: Mathias Krause <minipli@googlemail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
diff --git a/arch/x86/crypto/aesni-intel_asm.S b/arch/x86/crypto/aesni-intel_asm.S
index f592e03..d528fde 100644
--- a/arch/x86/crypto/aesni-intel_asm.S
+++ b/arch/x86/crypto/aesni-intel_asm.S
@@ -32,6 +32,7 @@
 #include <linux/linkage.h>
 #include <asm/inst.h>
 
+#ifdef __x86_64__
 .data
 POLY:   .octa 0xC2000000000000000000000000000001
 TWOONE: .octa 0x00000001000000000000000000000001
@@ -84,6 +85,7 @@
 #define arg8 STACK_OFFSET+16(%r14)
 #define arg9 STACK_OFFSET+24(%r14)
 #define arg10 STACK_OFFSET+32(%r14)
+#endif
 
 
 #define STATE1	%xmm0
@@ -130,6 +132,7 @@
 #endif
 
 
+#ifdef __x86_64__
 /* GHASH_MUL MACRO to implement: Data*HashKey mod (128,127,126,121,0)
 *
 *
@@ -1255,7 +1258,7 @@
 	pop	%r13
 	pop	%r12
 	ret
-
+#endif
 
 
 _key_expansion_128:
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 8a3b800..e1e60c7 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -97,7 +97,6 @@
 #ifdef CONFIG_X86_64
 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
 			      const u8 *in, unsigned int len, u8 *iv);
-#endif
 
 /* asmlinkage void aesni_gcm_enc()
  * void *ctx,  AES Key schedule. Starts on a 16 byte boundary.
@@ -149,6 +148,7 @@
 		PTR_ALIGN((u8 *)
 		crypto_tfm_ctx(crypto_aead_tfm(tfm)), AESNI_ALIGN);
 }
+#endif
 
 static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
 {
@@ -822,6 +822,7 @@
 };
 #endif
 
+#ifdef CONFIG_X86_64
 static int rfc4106_init(struct crypto_tfm *tfm)
 {
 	struct cryptd_aead *cryptd_tfm;
@@ -1237,6 +1238,7 @@
 		},
 	},
 };
+#endif
 
 static int __init aesni_init(void)
 {
@@ -1264,6 +1266,10 @@
 		goto blk_ctr_err;
 	if ((err = crypto_register_alg(&ablk_ctr_alg)))
 		goto ablk_ctr_err;
+	if ((err = crypto_register_alg(&__rfc4106_alg)))
+		goto __aead_gcm_err;
+	if ((err = crypto_register_alg(&rfc4106_alg)))
+		goto aead_gcm_err;
 #ifdef HAS_CTR
 	if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
 		goto ablk_rfc3686_ctr_err;
@@ -1281,19 +1287,9 @@
 	if ((err = crypto_register_alg(&ablk_xts_alg)))
 		goto ablk_xts_err;
 #endif
-	err = crypto_register_alg(&__rfc4106_alg);
-	if (err)
-		goto __aead_gcm_err;
-	err = crypto_register_alg(&rfc4106_alg);
-	if (err)
-		goto aead_gcm_err;
 	return err;
 
-aead_gcm_err:
-	crypto_unregister_alg(&__rfc4106_alg);
-__aead_gcm_err:
 #ifdef HAS_XTS
-	crypto_unregister_alg(&ablk_xts_alg);
 ablk_xts_err:
 #endif
 #ifdef HAS_PCBC
@@ -1309,6 +1305,10 @@
 	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
 ablk_rfc3686_ctr_err:
 #endif
+	crypto_unregister_alg(&rfc4106_alg);
+aead_gcm_err:
+	crypto_unregister_alg(&__rfc4106_alg);
+__aead_gcm_err:
 	crypto_unregister_alg(&ablk_ctr_alg);
 ablk_ctr_err:
 	crypto_unregister_alg(&blk_ctr_alg);
@@ -1331,8 +1331,6 @@
 
 static void __exit aesni_exit(void)
 {
-	crypto_unregister_alg(&__rfc4106_alg);
-	crypto_unregister_alg(&rfc4106_alg);
 #ifdef HAS_XTS
 	crypto_unregister_alg(&ablk_xts_alg);
 #endif
@@ -1346,6 +1344,8 @@
 #ifdef HAS_CTR
 	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
 #endif
+	crypto_unregister_alg(&rfc4106_alg);
+	crypto_unregister_alg(&__rfc4106_alg);
 	crypto_unregister_alg(&ablk_ctr_alg);
 	crypto_unregister_alg(&blk_ctr_alg);
 #endif