/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
* Used by test_cipher_speed()
*/
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", NULL
};

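/*
 * Completion plumbing for asynchronous requests: a speed test submits a
 * request and, if the driver returns -EINPROGRESS or -EBUSY, sleeps on
 * 'completion' until tcrypt_complete() has stored the final status in 'err'.
 */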
struct tcrypt_result {
	struct completion completion;
	int err;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

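/*
 * Throughput measurement: run the operation back to back for 'secs' seconds
 * (tracked via jiffies) and report how many operations and bytes completed.
 */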
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
			       struct scatterlist *sg, int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

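/*
 * Latency measurement: four untimed warm-up operations followed by eight
 * timed ones; the printed figure is the cycle count averaged (with rounding)
 * over the eight timed iterations.
 */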
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
			      struct scatterlist *sg, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

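/*
 * Submit one AEAD operation and, when it is handled asynchronously, wait for
 * the completion callback before picking up the final return code.
 */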
static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		reinit_completion(&tr->completion);
	}

	return ret;
}

static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

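/*
 * Build the AEAD payload scatterlist: entry 0 is left for the caller to
 * point at the associated data, while 'buflen' bytes of payload are spread
 * over up to XBUFSIZE page-sized buffers.
 */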
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);
	np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct tcrypt_result result;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	init_completion(&result.completion);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

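			/*
			 * sg[0]/sgout[0] carry the associated data; the
			 * remaining entries carry the payload, plus room for
			 * the authentication tag when encrypting.
			 */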
			sg_init_aead(sg, xbuf,
				     *b_size + (enc ? authsize : 0));

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0));

			sg_set_buf(&sg[0], assoc, aad_size);
			sg_set_buf(&sgout[0], assoc, aad_size);

			aead_request_set_crypt(req, sg, sgout, *b_size, iv);
			aead_request_set_ad(req, aad_size);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
	return;
}

static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, iv_len;
	const char *key;
	char iv[128];
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}
	desc.tfm = tfm;
	desc.flags = 0;

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_blkcipher, tfm), e);

	i = 0;
	do {

		b_size = block_sizes;
		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				printk("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			printk("test %u (%d bit key, %d byte blocks): ", i,
			       *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			ret = crypto_blkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				printk("setkey() failed flags=%x\n",
				       crypto_blkcipher_get_flags(tfm));
				goto out;
			}

			sg_init_table(sg, TVMEMSIZE);
			sg_set_buf(sg, tvmem[0] + *keysize,
				   PAGE_SIZE - *keysize);
			for (j = 1; j < TVMEMSIZE; j++) {
				sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
				memset (tvmem[j], 0xff, PAGE_SIZE);
			}

			iv_len = crypto_blkcipher_ivsize(tfm);
			if (iv_len) {
				memset(&iv, 0xff, iv_len);
				crypto_blkcipher_set_iv(tfm, iv, iv_len);
			}

			if (secs)
				ret = test_cipher_jiffies(&desc, enc, sg,
							  *b_size, secs);
			else
				ret = test_cipher_cycles(&desc, enc, sg,
							 *b_size);

			if (ret) {
				printk("%s() failed flags=%x\n", e, desc.flags);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	crypto_free_blkcipher(tfm);
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

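/*
 * When the per-update length equals the total length the one-shot digest()
 * path above is timed; otherwise each operation is an explicit init(), a
 * series of update() calls and a final().
 */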
static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct tcrypt_result tresult;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	init_completion(&tresult.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

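/*
 * Wrappers around test_ahash_speed_common(): test_ahash_speed() accepts any
 * implementation (mask 0), while test_hash_speed() passes CRYPTO_ALG_ASYNC
 * in the mask so that only synchronous implementations are selected.
 */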
static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}

	return ret;
}

static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, k, iv_len;
	struct tcrypt_result tresult;
	const char *key;
	char iv[128];
	struct ablkcipher_request *req;
	struct crypto_ablkcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&tresult.completion);

	tfm = crypto_alloc_ablkcipher(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_ablkcipher, tfm), e);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &tresult);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_ablkcipher_clear_flags(tfm, ~0);

			ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));
				goto out_free_req;
			}

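			/*
			 * Map the key followed by *b_size bytes of payload
			 * across as many tvmem pages as needed; the payload
			 * starts right after the key on the first page and
			 * may spill over page boundaries.
			 */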
			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_ablkcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_ablkcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	ablkcipher_request_free(req);
out:
	crypto_free_ablkcipher(tfm);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

static inline int tcrypt_test(const char *alg)
{
	int ret;

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

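/*
 * Dispatch on the 'mode' module parameter: mode 0 runs the whole battery of
 * correctness tests (cases 1-199), the low-numbered cases run individual
 * correctness tests through tcrypt_test(), and the cases from 200 upward run
 * the speed tests.
 */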
static int do_test(const char *alg, u32 type, u32 mask, int m)
{
	int i;
	int ret = 0;

	switch (m) {
	case 0:
		if (alg) {
			if (!crypto_has_alg(alg, type,
					    mask ?: CRYPTO_ALG_TYPE_MASK))
				ret = -ENOENT;
			break;
		}

		for (i = 1; i < 200; i++)
			ret += do_test(NULL, 0, 0, i);
		break;

	case 1:
		ret += tcrypt_test("md5");
		break;

	case 2:
		ret += tcrypt_test("sha1");
		break;

	case 3:
		ret += tcrypt_test("ecb(des)");
		ret += tcrypt_test("cbc(des)");
		ret += tcrypt_test("ctr(des)");
		break;

	case 4:
		ret += tcrypt_test("ecb(des3_ede)");
		ret += tcrypt_test("cbc(des3_ede)");
		ret += tcrypt_test("ctr(des3_ede)");
		break;

	case 5:
		ret += tcrypt_test("md4");
		break;

	case 6:
		ret += tcrypt_test("sha256");
		break;

	case 7:
		ret += tcrypt_test("ecb(blowfish)");
		ret += tcrypt_test("cbc(blowfish)");
		ret += tcrypt_test("ctr(blowfish)");
		break;

	case 8:
		ret += tcrypt_test("ecb(twofish)");
		ret += tcrypt_test("cbc(twofish)");
		ret += tcrypt_test("ctr(twofish)");
		ret += tcrypt_test("lrw(twofish)");
		ret += tcrypt_test("xts(twofish)");
		break;

	case 9:
		ret += tcrypt_test("ecb(serpent)");
		ret += tcrypt_test("cbc(serpent)");
		ret += tcrypt_test("ctr(serpent)");
		ret += tcrypt_test("lrw(serpent)");
		ret += tcrypt_test("xts(serpent)");
		break;

	case 10:
		ret += tcrypt_test("ecb(aes)");
		ret += tcrypt_test("cbc(aes)");
		ret += tcrypt_test("lrw(aes)");
		ret += tcrypt_test("xts(aes)");
		ret += tcrypt_test("ctr(aes)");
		ret += tcrypt_test("rfc3686(ctr(aes))");
		break;

	case 11:
		ret += tcrypt_test("sha384");
		break;

	case 12:
		ret += tcrypt_test("sha512");
		break;

	case 13:
		ret += tcrypt_test("deflate");
		break;

	case 14:
		ret += tcrypt_test("ecb(cast5)");
		ret += tcrypt_test("cbc(cast5)");
		ret += tcrypt_test("ctr(cast5)");
		break;

	case 15:
		ret += tcrypt_test("ecb(cast6)");
		ret += tcrypt_test("cbc(cast6)");
		ret += tcrypt_test("ctr(cast6)");
		ret += tcrypt_test("lrw(cast6)");
		ret += tcrypt_test("xts(cast6)");
		break;

	case 16:
		ret += tcrypt_test("ecb(arc4)");
		break;

	case 17:
		ret += tcrypt_test("michael_mic");
		break;

	case 18:
		ret += tcrypt_test("crc32c");
		break;

	case 19:
		ret += tcrypt_test("ecb(tea)");
		break;

	case 20:
		ret += tcrypt_test("ecb(xtea)");
		break;

	case 21:
		ret += tcrypt_test("ecb(khazad)");
		break;

	case 22:
		ret += tcrypt_test("wp512");
		break;

	case 23:
		ret += tcrypt_test("wp384");
		break;

	case 24:
		ret += tcrypt_test("wp256");
		break;

	case 25:
		ret += tcrypt_test("ecb(tnepres)");
		break;

	case 26:
		ret += tcrypt_test("ecb(anubis)");
		ret += tcrypt_test("cbc(anubis)");
		break;

	case 27:
		ret += tcrypt_test("tgr192");
		break;

	case 28:
		ret += tcrypt_test("tgr160");
		break;

	case 29:
		ret += tcrypt_test("tgr128");
		break;

	case 30:
		ret += tcrypt_test("ecb(xeta)");
		break;

	case 31:
		ret += tcrypt_test("pcbc(fcrypt)");
		break;

	case 32:
		ret += tcrypt_test("ecb(camellia)");
		ret += tcrypt_test("cbc(camellia)");
		ret += tcrypt_test("ctr(camellia)");
		ret += tcrypt_test("lrw(camellia)");
		ret += tcrypt_test("xts(camellia)");
		break;

	case 33:
		ret += tcrypt_test("sha224");
		break;

	case 34:
		ret += tcrypt_test("salsa20");
		break;

	case 35:
		ret += tcrypt_test("gcm(aes)");
		break;

	case 36:
		ret += tcrypt_test("lzo");
		break;

	case 37:
		ret += tcrypt_test("ccm(aes)");
		break;

	case 38:
		ret += tcrypt_test("cts(cbc(aes))");
		break;

	case 39:
		ret += tcrypt_test("rmd128");
		break;

	case 40:
		ret += tcrypt_test("rmd160");
		break;

	case 41:
		ret += tcrypt_test("rmd256");
		break;

	case 42:
		ret += tcrypt_test("rmd320");
		break;

	case 43:
		ret += tcrypt_test("ecb(seed)");
		break;

	case 44:
		ret += tcrypt_test("zlib");
		break;

	case 45:
		ret += tcrypt_test("rfc4309(ccm(aes))");
		break;

	case 46:
		ret += tcrypt_test("ghash");
		break;

	case 47:
		ret += tcrypt_test("crct10dif");
		break;

	case 100:
		ret += tcrypt_test("hmac(md5)");
		break;

	case 101:
		ret += tcrypt_test("hmac(sha1)");
		break;

	case 102:
		ret += tcrypt_test("hmac(sha256)");
		break;

	case 103:
		ret += tcrypt_test("hmac(sha384)");
		break;

	case 104:
		ret += tcrypt_test("hmac(sha512)");
		break;

	case 105:
		ret += tcrypt_test("hmac(sha224)");
		break;

	case 106:
		ret += tcrypt_test("xcbc(aes)");
		break;

	case 107:
		ret += tcrypt_test("hmac(rmd128)");
		break;

	case 108:
		ret += tcrypt_test("hmac(rmd160)");
		break;

	case 109:
		ret += tcrypt_test("vmac(aes)");
		break;

	case 110:
		ret += tcrypt_test("hmac(crc32)");
		break;

	case 150:
		ret += tcrypt_test("ansi_cprng");
		break;

	case 151:
		ret += tcrypt_test("rfc4106(gcm(aes))");
		break;

	case 152:
		ret += tcrypt_test("rfc4543(gcm(aes))");
		break;

	case 153:
		ret += tcrypt_test("cmac(aes)");
		break;

	case 154:
		ret += tcrypt_test("cmac(des3_ede)");
		break;

	case 155:
		ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
		break;

	case 156:
		ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
		break;

	case 157:
		ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
		break;
	case 181:
		ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
		break;
	case 182:
		ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
		break;
	case 183:
		ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
		break;
	case 184:
		ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
		break;
	case 185:
		ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
		break;
	case 186:
		ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
		break;
	case 187:
		ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
		break;
	case 188:
		ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
		break;
	case 189:
		ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
		break;
	case 190:
		ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
		break;
	case 200:
		test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		break;

	case 201:
		test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		break;

	case 202:
		test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		break;

	case 203:
		test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
				  speed_template_8_32);
		test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
				  speed_template_8_32);
		test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
				  speed_template_8_32);
		test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
				  speed_template_8_32);
		test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
				  speed_template_8_32);
		test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
				  speed_template_8_32);
		break;

	case 204:
		test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
				  speed_template_8);
		test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
				  speed_template_8);
		test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
				  speed_template_8);
		test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
				  speed_template_8);
		break;

	case 205:
		test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		break;

	case 206:
		test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
				  speed_template_16_32);
		break;
1512
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001513 case 207:
1514 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1515 speed_template_16_32);
1516 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1517 speed_template_16_32);
1518 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1519 speed_template_16_32);
1520 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1521 speed_template_16_32);
1522 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1523 speed_template_16_32);
1524 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1525 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001526 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1527 speed_template_32_48);
1528 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1529 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001530 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1531 speed_template_32_64);
1532 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1533 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001534 break;
1535
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001536 case 208:
1537 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1538 speed_template_8);
1539 break;
1540
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001541 case 209:
1542 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1543 speed_template_8_16);
1544 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1545 speed_template_8_16);
1546 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1547 speed_template_8_16);
1548 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1549 speed_template_8_16);
1550 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1551 speed_template_8_16);
1552 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1553 speed_template_8_16);
1554 break;
1555
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001556 case 210:
1557 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1558 speed_template_16_32);
1559 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1560 speed_template_16_32);
1561 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1562 speed_template_16_32);
1563 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1564 speed_template_16_32);
1565 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1566 speed_template_16_32);
1567 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1568 speed_template_16_32);
1569 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1570 speed_template_32_48);
1571 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1572 speed_template_32_48);
1573 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1574 speed_template_32_64);
1575 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1576 speed_template_32_64);
1577 break;
1578
Tim Chen53f52d72013-12-11 14:28:47 -08001579 case 211:
1580 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08001581 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05301582 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01001583 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08001584 break;
1585
Herbert Xu4e4aab62015-06-17 14:04:21 +08001586 case 212:
1587 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08001588 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08001589 break;
1590
Martin Willi2dce0632015-07-16 19:13:59 +02001591 case 213:
1592 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
1593 NULL, 0, 16, 8, aead_speed_template_36);
1594 break;
1595
1596 case 214:
1597 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
1598 speed_template_32);
1599 break;
1600
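	/* Added commentary (not part of the original file): the 300-series
	 * cases below time the synchronous hash interface via
	 * test_hash_speed().  mode=300 with alg="<name>" benchmarks just that
	 * algorithm; mode=300 without alg falls through and runs every hash
	 * case in turn, while a specific mode such as 304 runs only that one
	 * because of the "if (mode > 300 && mode < 400) break;" guards.
	 */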
Michal Ludvige8057922006-05-30 22:04:19 +10001602 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08001603 if (alg) {
1604 test_hash_speed(alg, sec, generic_hash_speed_template);
1605 break;
1606 }
1607
Michal Ludvige8057922006-05-30 22:04:19 +10001608 /* fall through */
1609
1610 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10001611 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001612 if (mode > 300 && mode < 400) break;
1613
1614 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10001615 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001616 if (mode > 300 && mode < 400) break;
1617
1618 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10001619 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001620 if (mode > 300 && mode < 400) break;
1621
1622 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10001623 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001624 if (mode > 300 && mode < 400) break;
1625
1626 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10001627 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001628 if (mode > 300 && mode < 400) break;
1629
1630 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10001631 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001632 if (mode > 300 && mode < 400) break;
1633
1634 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10001635 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001636 if (mode > 300 && mode < 400) break;
1637
1638 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10001639 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001640 if (mode > 300 && mode < 400) break;
1641
1642 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10001643 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001644 if (mode > 300 && mode < 400) break;
1645
1646 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10001647 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001648 if (mode > 300 && mode < 400) break;
1649
1650 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10001651 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001652 if (mode > 300 && mode < 400) break;
1653
1654 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10001655 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001656 if (mode > 300 && mode < 400) break;
1657
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001658 case 313:
1659 test_hash_speed("sha224", sec, generic_hash_speed_template);
1660 if (mode > 300 && mode < 400) break;
1661
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001662 case 314:
1663 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1664 if (mode > 300 && mode < 400) break;
1665
1666 case 315:
1667 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1668 if (mode > 300 && mode < 400) break;
1669
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001670 case 316:
1671 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1672 if (mode > 300 && mode < 400) break;
1673
1674 case 317:
1675 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1676 if (mode > 300 && mode < 400) break;
1677
Huang Ying18bcc912010-03-10 18:30:32 +08001678 case 318:
1679 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1680 if (mode > 300 && mode < 400) break;
1681
Tim Chene3899e42012-09-27 15:44:24 -07001682 case 319:
1683 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1684 if (mode > 300 && mode < 400) break;
1685
Herbert Xu684115212013-09-07 12:56:26 +10001686 case 320:
1687 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1688 if (mode > 300 && mode < 400) break;
1689
Martin Willi2dce0632015-07-16 19:13:59 +02001690 case 321:
1691 test_hash_speed("poly1305", sec, poly1305_speed_template);
1692 if (mode > 300 && mode < 400) break;
1693
Michal Ludvige8057922006-05-30 22:04:19 +10001694 case 399:
1695 break;
1696
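	/* Added commentary (not part of the original file): the 400-series
	 * cases mirror the 300-series above but exercise the asynchronous
	 * hash (ahash) interface via test_ahash_speed(), using the same
	 * fall-through dispatch.
	 */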
David S. Millerbeb63da2010-05-19 14:11:21 +10001697 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08001698 if (alg) {
1699 test_ahash_speed(alg, sec, generic_hash_speed_template);
1700 break;
1701 }
1702
David S. Millerbeb63da2010-05-19 14:11:21 +10001703 /* fall through */
1704
1705 case 401:
1706 test_ahash_speed("md4", sec, generic_hash_speed_template);
1707 if (mode > 400 && mode < 500) break;
1708
1709 case 402:
1710 test_ahash_speed("md5", sec, generic_hash_speed_template);
1711 if (mode > 400 && mode < 500) break;
1712
1713 case 403:
1714 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1715 if (mode > 400 && mode < 500) break;
1716
1717 case 404:
1718 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1719 if (mode > 400 && mode < 500) break;
1720
1721 case 405:
1722 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1723 if (mode > 400 && mode < 500) break;
1724
1725 case 406:
1726 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1727 if (mode > 400 && mode < 500) break;
1728
1729 case 407:
1730 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1731 if (mode > 400 && mode < 500) break;
1732
1733 case 408:
1734 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1735 if (mode > 400 && mode < 500) break;
1736
1737 case 409:
1738 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1739 if (mode > 400 && mode < 500) break;
1740
1741 case 410:
1742 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1743 if (mode > 400 && mode < 500) break;
1744
1745 case 411:
1746 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1747 if (mode > 400 && mode < 500) break;
1748
1749 case 412:
1750 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1751 if (mode > 400 && mode < 500) break;
1752
1753 case 413:
1754 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1755 if (mode > 400 && mode < 500) break;
1756
1757 case 414:
1758 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1759 if (mode > 400 && mode < 500) break;
1760
1761 case 415:
1762 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1763 if (mode > 400 && mode < 500) break;
1764
1765 case 416:
1766 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1767 if (mode > 400 && mode < 500) break;
1768
1769 case 417:
1770 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1771 if (mode > 400 && mode < 500) break;
1772
1773 case 499:
1774 break;
1775
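	/* Added commentary (not part of the original file): the 500-series
	 * cases benchmark ciphers through the asynchronous cipher interface
	 * via test_acipher_speed(), largely mirroring the synchronous
	 * 200-series tests above.
	 */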
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001776 case 500:
1777 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1778 speed_template_16_24_32);
1779 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1780 speed_template_16_24_32);
1781 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1782 speed_template_16_24_32);
1783 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1784 speed_template_16_24_32);
1785 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1786 speed_template_32_40_48);
1787 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1788 speed_template_32_40_48);
1789 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1790 speed_template_32_48_64);
1791 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1792 speed_template_32_48_64);
1793 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1794 speed_template_16_24_32);
1795 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1796 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02001797 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1798 speed_template_16_24_32);
1799 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1800 speed_template_16_24_32);
1801 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1802 speed_template_16_24_32);
1803 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1804 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02001805 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1806 speed_template_20_28_36);
1807 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1808 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001809 break;
1810
1811 case 501:
1812 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1813 des3_speed_template, DES3_SPEED_VECTORS,
1814 speed_template_24);
1815 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
1816 des3_speed_template, DES3_SPEED_VECTORS,
1817 speed_template_24);
1818 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1819 des3_speed_template, DES3_SPEED_VECTORS,
1820 speed_template_24);
1821 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
1822 des3_speed_template, DES3_SPEED_VECTORS,
1823 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02001824 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
1825 des3_speed_template, DES3_SPEED_VECTORS,
1826 speed_template_24);
1827 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
1828 des3_speed_template, DES3_SPEED_VECTORS,
1829 speed_template_24);
1830 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
1831 des3_speed_template, DES3_SPEED_VECTORS,
1832 speed_template_24);
1833 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
1834 des3_speed_template, DES3_SPEED_VECTORS,
1835 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001836 break;
1837
1838 case 502:
1839 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1840 speed_template_8);
1841 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1842 speed_template_8);
1843 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1844 speed_template_8);
1845 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1846 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02001847 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
1848 speed_template_8);
1849 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
1850 speed_template_8);
1851 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
1852 speed_template_8);
1853 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
1854 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001855 break;
1856
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001857 case 503:
1858 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1859 speed_template_16_32);
1860 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1861 speed_template_16_32);
1862 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1863 speed_template_16_32);
1864 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1865 speed_template_16_32);
1866 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1867 speed_template_16_32);
1868 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1869 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001870 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1871 speed_template_32_48);
1872 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1873 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001874 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1875 speed_template_32_64);
1876 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1877 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001878 break;
1879
Johannes Goetzfried107778b2012-05-28 15:54:24 +02001880 case 504:
1881 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1882 speed_template_16_24_32);
1883 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1884 speed_template_16_24_32);
1885 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1886 speed_template_16_24_32);
1887 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1888 speed_template_16_24_32);
1889 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1890 speed_template_16_24_32);
1891 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1892 speed_template_16_24_32);
1893 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1894 speed_template_32_40_48);
1895 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1896 speed_template_32_40_48);
1897 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1898 speed_template_32_48_64);
1899 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1900 speed_template_32_48_64);
1901 break;
1902
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001903 case 505:
1904 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1905 speed_template_8);
1906 break;
1907
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001908 case 506:
1909 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1910 speed_template_8_16);
1911 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1912 speed_template_8_16);
1913 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1914 speed_template_8_16);
1915 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1916 speed_template_8_16);
1917 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1918 speed_template_8_16);
1919 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1920 speed_template_8_16);
1921 break;
1922
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001923 case 507:
1924 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1925 speed_template_16_32);
1926 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1927 speed_template_16_32);
1928 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1929 speed_template_16_32);
1930 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1931 speed_template_16_32);
1932 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1933 speed_template_16_32);
1934 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1935 speed_template_16_32);
1936 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1937 speed_template_32_48);
1938 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1939 speed_template_32_48);
1940 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1941 speed_template_32_64);
1942 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1943 speed_template_32_64);
1944 break;
1945
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03001946 case 508:
1947 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
1948 speed_template_16_32);
1949 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
1950 speed_template_16_32);
1951 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
1952 speed_template_16_32);
1953 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
1954 speed_template_16_32);
1955 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1956 speed_template_16_32);
1957 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1958 speed_template_16_32);
1959 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1960 speed_template_32_48);
1961 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1962 speed_template_32_48);
1963 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1964 speed_template_32_64);
1965 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1966 speed_template_32_64);
1967 break;
1968
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03001969 case 509:
1970 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
1971 speed_template_8_32);
1972 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
1973 speed_template_8_32);
1974 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
1975 speed_template_8_32);
1976 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
1977 speed_template_8_32);
1978 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1979 speed_template_8_32);
1980 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1981 speed_template_8_32);
1982 break;
1983
Linus Torvalds1da177e2005-04-16 15:20:36 -07001984 case 1000:
1985 test_available();
1986 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001987 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10001988
1989 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001990}
1991
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08001992static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07001993{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08001994 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08001995 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08001996
Herbert Xuf139cfa2008-07-31 12:23:53 +08001997 for (i = 0; i < TVMEMSIZE; i++) {
1998 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
1999 if (!tvmem[i])
2000 goto err_free_tv;
2001 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002002
Herbert Xu86068132014-12-04 16:43:29 +08002003 err = do_test(alg, type, mask, mode);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002004
Jarod Wilson4e033a62009-05-27 15:10:21 +10002005 if (err) {
2006 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2007 goto err_free_tv;
2008 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002009
Jarod Wilson4e033a62009-05-27 15:10:21 +10002010	/* We intentionally return -EAGAIN to prevent the module from staying
2011	 * loaded, unless we're running in fips mode. It does all its work from
2012	 * init() and doesn't offer any runtime functionality, but in
2013	 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10002014	 * => we don't need to keep it in memory, do we?
2015	 * -- mludvig
2016	 */
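	/* Added note (not part of the original file): as a consequence of the
	 * -EAGAIN return below, loading the module is normally expected to
	 * fail (modprobe typically reports "Resource temporarily unavailable")
	 * even when every selected test passed; the actual results are printed
	 * to the kernel log.
	 */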
Jarod Wilson4e033a62009-05-27 15:10:21 +10002017 if (!fips_enabled)
2018 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002019
Herbert Xuf139cfa2008-07-31 12:23:53 +08002020err_free_tv:
2021 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2022 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002023
2024 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002025}
2026
2027/*
2028 * If an init function is provided, an exit function must also be provided
2029 * to allow module unload.
2030 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002031static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002032
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002033module_init(tcrypt_mod_init);
2034module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002035
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002036module_param(alg, charp, 0);
2037module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08002038module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002039module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002040module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07002041MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2042 "(defaults to zero which uses CPU cycles instead)");
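
/*
 * Example usage (illustrative sketch, not part of the original file; the
 * valid modes are the case labels of the switch statement above):
 *
 *	modprobe tcrypt mode=0			run all self-tests
 *	modprobe tcrypt mode=201 sec=1		time des3_ede, one second per test
 *	modprobe tcrypt mode=300 alg=sha256	time a single named hash
 *
 * With sec left at its default of zero, the speed tests measure CPU cycles
 * instead of wall-clock seconds, as described in the parameter description
 * above.
 */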
Linus Torvalds1da177e2005-04-16 15:20:36 -07002043
2044MODULE_LICENSE("GPL");
2045MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2046MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");