/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>

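/*
 * CTR mode helper for a single 128-bit block: convert the little-endian
 * counter to the cipher's big-endian block format, advance the counter,
 * encrypt the counter block and XOR it into the source data.
 */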
static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}

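/*
 * Single-block XTS helpers; the glue layer falls back to these when fewer
 * than SERPENT_PARALLEL_BLOCKS blocks remain to be processed.
 */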
static void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}

static void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}

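/*
 * Dispatch tables for the shared glue_helper code.  Entries run from the
 * widest implementation (the 8-way AVX assembler routines) down to the
 * generic one-block C fallback; fpu_blocks_limit tells the glue code how
 * many blocks must be pending before it is worth saving the FPU state and
 * taking the vector path.
 */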
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};

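/*
 * Synchronous blkcipher entry points; each one hands the request to the
 * corresponding glue_helper routine together with the dispatch table above.
 */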
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}

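/*
 * FPU handling for the LRW callback path: the FPU is only claimed once at
 * least SERPENT_PARALLEL_BLOCKS worth of data is pending, and it is
 * released once by the caller after the walk has finished.
 */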
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

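/*
 * Per-request state passed to the LRW crypt callbacks: the expanded Serpent
 * key plus a flag tracking whether the FPU has already been claimed, so the
 * callbacks can enable it lazily and the caller can release it afterwards.
 */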
struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};

static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_decrypt(ctx->ctx, srcdst, srcdst);
}

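/*
 * LRW keeps the Serpent key schedule together with the precomputed
 * multiplication table for the tweak.  The supplied key is the Serpent key
 * followed by one extra block that initialises that table.
 */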
struct serpent_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct serpent_ctx serpent_ctx;
};

static int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
							SERPENT_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
						SERPENT_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}

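/*
 * XTS uses two independent Serpent key schedules: one for encrypting the
 * data blocks and one for encrypting the tweak.  The user-supplied key is
 * the two halves concatenated.
 */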
struct serpent_xts_ctx {
	struct serpent_ctx tweak_ctx;
	struct serpent_ctx crypt_ctx;
};

static int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

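/*
 * Algorithm registrations: the "__"-prefixed entries are the internal
 * synchronous blkciphers (priority 0, not selected directly), while the
 * priority-500 ablkcipher entries wrap them through the cryptd-based ablk
 * helper so that requests from contexts where the FPU cannot be used are
 * deferred to a kernel thread.
 */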
static struct crypto_alg serpent_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx",
	.cra_driver_name	= "__driver-xts-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

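/*
 * Module init: register the algorithms only if the CPU advertises AVX and
 * OSXSAVE and the OS has enabled saving of both SSE and YMM state in XCR0;
 * without that, AVX instructions would fault.
 */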
static int __init serpent_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		printk(KERN_INFO "AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		printk(KERN_INFO "AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

static void __exit serpent_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("serpent");