/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
	NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

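/*
 * Build a scatterlist for one AEAD request: sg[0] holds aad_size bytes of
 * associated data, followed by buflen bytes of payload spread over the
 * xbuf pages (capped at XBUFSIZE pages).
 */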
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

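/*
 * Per-request state for the multibuffer AEAD speed test: input/output
 * scatterlists, the request itself, a completion wait object and the
 * backing pages for payload, output and associated data.
 */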
struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
				u32 num_mb)
{
	int i, rc[num_mb], err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

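/*
 * The speed tests below use one of two measurement modes: a wall-clock
 * mode that counts operations completed in 'secs' seconds (jiffies
 * based), and a cycle-count mode that averages get_cycles() over eight
 * timed runs after four warm-up runs.
 */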
static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb);
		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);
	return 0;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / (8 * num_mb), blen);

	return ret;
}

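/*
 * Multibuffer AEAD speed test: num_mb requests are prepared and fired
 * concurrently for every (key size, block size) combination.  For
 * decryption, one encryption pass is done first so that a valid
 * authentication tag is present in the input.
 */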
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;


	if (aad_size >= PAGE_SIZE) {
		pr_err("associated data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: aead: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs)
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
			else
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

static int test_aead_jiffies(struct aead_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

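/*
 * Single-request AEAD speed test: same key/block-size loop as the
 * multibuffer variant above, but one request is timed at a time.
 */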
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associated data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
			get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
					i, *keysize * 8, *b_size);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

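/*
 * test_hash_sg_init() fills the TVMEMSIZE tvmem pages with 0xff and maps
 * them into a scatterlist; it is used by the single-request hash speed
 * tests further below.
 */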
static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

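/*
 * Per-request state for the multibuffer ahash speed test; each of the
 * num_mb requests hashes its own XBUFSIZE pages into a private result
 * buffer.
 */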
struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb)
{
	int i, rc[num_mb], err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb);
		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);
	return 0;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / (8 * num_mb), blen);

	return ret;
}

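/*
 * Multibuffer hash speed test: only digest-style templates (blen ==
 * plen) are timed, with num_mb requests issued concurrently per
 * measurement.
 */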
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs)
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
		else
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);


		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

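/*
 * Common body of the hash speed tests.  'mask' is passed to
 * crypto_alloc_ahash(): test_hash_speed() sets CRYPTO_ALG_ASYNC so only
 * synchronous implementations are picked, while test_ahash_speed()
 * passes 0 and accepts any implementation.
 */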
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
			get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

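/*
 * Per-request state for the multibuffer skcipher speed test.
 */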
struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
				u32 num_mb)
{
	int i, rc[num_mb], err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb);
		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);
	return 0;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / (8 * num_mb), blen);

	return ret;
}

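/*
 * Multibuffer skcipher speed test: mirrors test_mb_aead_speed(), but
 * each request encrypts or decrypts in place (source and destination
 * scatterlists are the same).
 */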
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs)
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
			else
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

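/*
 * Single-request cipher speed test.  'async' selects whether
 * asynchronous implementations may be used; test_acipher_speed() and
 * test_cipher_speed() below are thin wrappers around this function.
 */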
static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
				struct cipher_speed_template *template,
				unsigned int tcount, u8 *keysize, bool async)
{
	unsigned int ret, i, j, k, iv_len;
	struct crypto_wait wait;
	const char *key;
	char iv[128];
	struct skcipher_request *req;
	struct crypto_skcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
			get_driver_name(crypto_skcipher, tfm), e);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			skcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	skcipher_request_free(req);
out:
	crypto_free_skcipher(tfm);
}

static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   true);
}

static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   false);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

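/*
 * Run the testmgr self-test for one algorithm.  In FIPS mode, non-FIPS
 * algorithms return -EINVAL and are treated as a pass here.
 */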
static inline int tcrypt_test(const char *alg)
{
	int ret;

	pr_debug("testing %s\n", alg);

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

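/*
 * Dispatch on the numeric 'mode' module parameter.  Mode 0 either checks
 * that a named algorithm is available or, with no algorithm given, runs
 * test modes 1..199 in turn; the individual modes below exercise groups
 * of related algorithms.
 */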
static int do_test(const char *alg, u32 type, u32 mask, int m)
{
	int i;
	int ret = 0;

	switch (m) {
	case 0:
		if (alg) {
			if (!crypto_has_alg(alg, type,
					    mask ?: CRYPTO_ALG_TYPE_MASK))
				ret = -ENOENT;
			break;
		}

		for (i = 1; i < 200; i++)
			ret += do_test(NULL, 0, 0, i);
		break;

	case 1:
		ret += tcrypt_test("md5");
		break;

	case 2:
		ret += tcrypt_test("sha1");
		break;

	case 3:
		ret += tcrypt_test("ecb(des)");
		ret += tcrypt_test("cbc(des)");
		ret += tcrypt_test("ctr(des)");
		break;

	case 4:
		ret += tcrypt_test("ecb(des3_ede)");
		ret += tcrypt_test("cbc(des3_ede)");
		ret += tcrypt_test("ctr(des3_ede)");
		break;

	case 5:
		ret += tcrypt_test("md4");
		break;

	case 6:
		ret += tcrypt_test("sha256");
		break;

	case 7:
		ret += tcrypt_test("ecb(blowfish)");
		ret += tcrypt_test("cbc(blowfish)");
		ret += tcrypt_test("ctr(blowfish)");
		break;

	case 8:
		ret += tcrypt_test("ecb(twofish)");
		ret += tcrypt_test("cbc(twofish)");
		ret += tcrypt_test("ctr(twofish)");
		ret += tcrypt_test("lrw(twofish)");
		ret += tcrypt_test("xts(twofish)");
		break;

	case 9:
		ret += tcrypt_test("ecb(serpent)");
		ret += tcrypt_test("cbc(serpent)");
		ret += tcrypt_test("ctr(serpent)");
		ret += tcrypt_test("lrw(serpent)");
		ret += tcrypt_test("xts(serpent)");
		break;

	case 10:
		ret += tcrypt_test("ecb(aes)");
		ret += tcrypt_test("cbc(aes)");
		ret += tcrypt_test("lrw(aes)");
		ret += tcrypt_test("xts(aes)");
		ret += tcrypt_test("ctr(aes)");
		ret += tcrypt_test("rfc3686(ctr(aes))");
		break;

	case 11:
		ret += tcrypt_test("sha384");
		break;

	case 12:
		ret += tcrypt_test("sha512");
		break;

	case 13:
		ret += tcrypt_test("deflate");
		break;

	case 14:
		ret += tcrypt_test("ecb(cast5)");
		ret += tcrypt_test("cbc(cast5)");
		ret += tcrypt_test("ctr(cast5)");
		break;

	case 15:
		ret += tcrypt_test("ecb(cast6)");
		ret += tcrypt_test("cbc(cast6)");
		ret += tcrypt_test("ctr(cast6)");
		ret += tcrypt_test("lrw(cast6)");
		ret += tcrypt_test("xts(cast6)");
		break;

	case 16:
		ret += tcrypt_test("ecb(arc4)");
		break;

	case 17:
		ret += tcrypt_test("michael_mic");
		break;

	case 18:
		ret += tcrypt_test("crc32c");
		break;

	case 19:
		ret += tcrypt_test("ecb(tea)");
		break;

	case 20:
		ret += tcrypt_test("ecb(xtea)");
		break;

	case 21:
		ret += tcrypt_test("ecb(khazad)");
		break;

	case 22:
		ret += tcrypt_test("wp512");
		break;

	case 23:
		ret += tcrypt_test("wp384");
		break;

	case 24:
		ret += tcrypt_test("wp256");
		break;

	case 25:
		ret += tcrypt_test("ecb(tnepres)");
		break;

	case 26:
		ret += tcrypt_test("ecb(anubis)");
		ret += tcrypt_test("cbc(anubis)");
		break;

	case 27:
		ret += tcrypt_test("tgr192");
		break;

	case 28:
		ret += tcrypt_test("tgr160");
		break;

	case 29:
		ret += tcrypt_test("tgr128");
		break;

	case 30:
		ret += tcrypt_test("ecb(xeta)");
		break;
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001771 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001772
David Howells90831632006-12-16 12:13:14 +11001773 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001774 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001775 break;
1776
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001777 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001778 ret += tcrypt_test("ecb(camellia)");
1779 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001780 ret += tcrypt_test("ctr(camellia)");
1781 ret += tcrypt_test("lrw(camellia)");
1782 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001783 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001784
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001785 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001786 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001787 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001788
Tan Swee Heng2407d602007-11-23 19:45:00 +08001789 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001790 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001791 break;
1792
Herbert Xu8df213d2007-12-02 14:55:47 +11001793 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001794 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001795 break;
1796
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001797 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001798 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001799 break;
1800
Joy Latten93cc74e2007-12-12 20:24:22 +08001801 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001802 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001803 break;
1804
Kevin Coffman76cb9522008-03-24 21:26:16 +08001805 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001806 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001807 break;
1808
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001809 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001810 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001811 break;
1812
1813 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001814 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001815 break;
1816
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001817 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001818 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001819 break;
1820
1821 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001822 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001823 break;
1824
1825 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001826 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001827 break;
1828
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001829 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001830 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001831 break;
1832
Jarod Wilson5d667322009-05-04 19:23:40 +08001833 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001834 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001835 break;
1836
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001837 case 46:
1838 ret += tcrypt_test("ghash");
1839 break;
1840
Herbert Xu684115212013-09-07 12:56:26 +10001841 case 47:
1842 ret += tcrypt_test("crct10dif");
1843 break;
1844
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301845 case 48:
1846 ret += tcrypt_test("sha3-224");
1847 break;
1848
1849 case 49:
1850 ret += tcrypt_test("sha3-256");
1851 break;
1852
1853 case 50:
1854 ret += tcrypt_test("sha3-384");
1855 break;
1856
1857 case 51:
1858 ret += tcrypt_test("sha3-512");
1859 break;
1860
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001861 case 52:
1862 ret += tcrypt_test("sm3");
1863 break;
1864
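/* Cases 100-114: keyed hash self-tests (HMAC variants, xcbc, vmac). */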
Linus Torvalds1da177e2005-04-16 15:20:36 -07001865 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001866 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001867 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001868
Linus Torvalds1da177e2005-04-16 15:20:36 -07001869 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001870 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001871 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001872
Linus Torvalds1da177e2005-04-16 15:20:36 -07001873 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001874 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001875 break;
1876
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001877 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001878 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001879 break;
1880
1881 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001882 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001883 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001884
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001885 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001886 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001887 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001888
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001889 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001890 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001891 break;
1892
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001893 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001894 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001895 break;
1896
1897 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001898 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001899 break;
1900
Shane Wangf1939f72009-09-02 20:05:22 +10001901 case 109:
1902 ret += tcrypt_test("vmac(aes)");
1903 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001904
Sonic Zhanga482b082012-05-25 17:54:13 +08001905 case 110:
1906 ret += tcrypt_test("hmac(crc32)");
1907 break;
Shane Wangf1939f72009-09-02 20:05:22 +10001908
raveendra padasalagi98eca722016-07-01 11:16:54 +05301909 case 111:
1910 ret += tcrypt_test("hmac(sha3-224)");
1911 break;
1912
1913 case 112:
1914 ret += tcrypt_test("hmac(sha3-256)");
1915 break;
1916
1917 case 113:
1918 ret += tcrypt_test("hmac(sha3-384)");
1919 break;
1920
1921 case 114:
1922 ret += tcrypt_test("hmac(sha3-512)");
1923 break;
1924
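/* Cases 150-191: RNG, AEAD, CMAC, authenc() template and SM4 self-tests. */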
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001925 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001926 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001927 break;
1928
Adrian Hoban69435b92010-11-04 15:02:04 -04001929 case 151:
1930 ret += tcrypt_test("rfc4106(gcm(aes))");
1931 break;
1932
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001933 case 152:
1934 ret += tcrypt_test("rfc4543(gcm(aes))");
1935 break;
1936
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001937 case 153:
1938 ret += tcrypt_test("cmac(aes)");
1939 break;
1940
1941 case 154:
1942 ret += tcrypt_test("cmac(des3_ede)");
1943 break;
1944
Horia Geantabbf9c892013-11-28 15:11:16 +02001945 case 155:
1946 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1947 break;
1948
Horia Geantabca4feb2014-03-14 17:46:51 +02001949 case 156:
1950 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1951 break;
1952
1953 case 157:
1954 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1955 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05301956 case 181:
1957 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
1958 break;
1959 case 182:
1960 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
1961 break;
1962 case 183:
1963 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
1964 break;
1965 case 184:
1966 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
1967 break;
1968 case 185:
1969 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
1970 break;
1971 case 186:
1972 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
1973 break;
1974 case 187:
1975 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
1976 break;
1977 case 188:
1978 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
1979 break;
1980 case 189:
1981 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
1982 break;
1983 case 190:
1984 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
1985 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00001986 case 191:
1987 ret += tcrypt_test("ecb(sm4)");
1988 break;
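/*
 * Cases 200 and up are speed tests rather than correctness tests.
 * Cases 200-214 time synchronous ciphers, AEADs and chacha20 for 'sec'
 * seconds per data point; with sec=0 the speed helpers report cycle
 * counts instead.
 */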
Harald Welteebfd9bc2005-06-22 13:27:23 -07001989 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10001990 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001991 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001992 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001993 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001994 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001995 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001996 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001997 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11001998 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001999 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002000 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002001 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002002 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002003 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002004 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002005 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002006 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2007 speed_template_16_24_32);
2008 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2009 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002010 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2011 speed_template_16_24_32);
2012 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2013 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002014 break;
2015
2016 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002017 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002018 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002019 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002020 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002021 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002022 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002023 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002024 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002025 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002026 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002027 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002028 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002029 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2030 des3_speed_template, DES3_SPEED_VECTORS,
2031 speed_template_24);
2032 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2033 des3_speed_template, DES3_SPEED_VECTORS,
2034 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002035 break;
2036
2037 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002038 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002039 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002040 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002041 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002042 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002043 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002044 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002045 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002046 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2047 speed_template_16_24_32);
2048 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2049 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002050 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2051 speed_template_32_40_48);
2052 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2053 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002054 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2055 speed_template_32_48_64);
2056 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2057 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002058 break;
2059
2060 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002061 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002062 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002063 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002064 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002065 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002066 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002067 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002068 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002069 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2070 speed_template_8_32);
2071 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2072 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002073 break;
2074
2075 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002076 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002077 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002078 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002079 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002080 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002081 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002082 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002083 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002084 break;
2085
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002086 case 205:
2087 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002088 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002089 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002090 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002091 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002092 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002093 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002094 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002095 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2096 speed_template_16_24_32);
2097 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2098 speed_template_16_24_32);
2099 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2100 speed_template_32_40_48);
2101 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2102 speed_template_32_40_48);
2103 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2104 speed_template_32_48_64);
2105 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2106 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002107 break;
2108
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002109 case 206:
2110 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002111 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002112 break;
2113
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002114 case 207:
2115 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2116 speed_template_16_32);
2117 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2118 speed_template_16_32);
2119 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2120 speed_template_16_32);
2121 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2122 speed_template_16_32);
2123 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2124 speed_template_16_32);
2125 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2126 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002127 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2128 speed_template_32_48);
2129 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2130 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002131 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2132 speed_template_32_64);
2133 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2134 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002135 break;
2136
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002137 case 208:
2138 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2139 speed_template_8);
2140 break;
2141
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002142 case 209:
2143 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2144 speed_template_8_16);
2145 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2146 speed_template_8_16);
2147 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2148 speed_template_8_16);
2149 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2150 speed_template_8_16);
2151 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2152 speed_template_8_16);
2153 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2154 speed_template_8_16);
2155 break;
2156
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002157 case 210:
2158 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2159 speed_template_16_32);
2160 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2161 speed_template_16_32);
2162 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2163 speed_template_16_32);
2164 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2165 speed_template_16_32);
2166 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2167 speed_template_16_32);
2168 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2169 speed_template_16_32);
2170 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2171 speed_template_32_48);
2172 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2173 speed_template_32_48);
2174 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2175 speed_template_32_64);
2176 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2177 speed_template_32_64);
2178 break;
2179
Tim Chen53f52d72013-12-11 14:28:47 -08002180 case 211:
2181 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002182 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302183 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002184 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002185 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2186 NULL, 0, 16, 16, aead_speed_template_20);
2187 test_aead_speed("gcm(aes)", DECRYPT, sec,
2188 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002189 break;
2190
Herbert Xu4e4aab62015-06-17 14:04:21 +08002191 case 212:
2192 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002193 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002194 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2195 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002196 break;
2197
Martin Willi2dce0632015-07-16 19:13:59 +02002198 case 213:
2199 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2200 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002201 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2202 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002203 break;
2204
2205 case 214:
2206 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2207 speed_template_32);
2208 break;
2209
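/* Cases 215-217: multibuffer AEAD speed tests, num_mb requests at a time. */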
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002210 case 215:
2211 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2212 0, 16, 16, aead_speed_template_20, num_mb);
2213 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2214 speed_template_16_24_32, num_mb);
2215 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2216 0, 16, 16, aead_speed_template_20, num_mb);
2217 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2218 speed_template_16_24_32, num_mb);
2219 break;
2220
2221 case 216:
2222 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2223 16, 16, aead_speed_template_19, num_mb);
2224 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2225 16, 16, aead_speed_template_19, num_mb);
2226 break;
2227
2228 case 217:
2229 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2230 sec, NULL, 0, 16, 8, aead_speed_template_36,
2231 num_mb);
2232 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2233 sec, NULL, 0, 16, 8, aead_speed_template_36,
2234 num_mb);
2235 break;
2236
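/*
 * Cases 300-399: synchronous hash speed tests.  mode=300 with alg=<name>
 * times just that algorithm; plain mode=300 falls through and times every
 * digest below; modes 301-326 each time a single digest.
 */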
Michal Ludvige8057922006-05-30 22:04:19 +10002237 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002238 if (alg) {
2239 test_hash_speed(alg, sec, generic_hash_speed_template);
2240 break;
2241 }
Michal Ludvige8057922006-05-30 22:04:19 +10002242 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002243 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002244 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002245 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002246 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002247 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002248 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002249 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002250 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002251 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002252 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002253 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002254 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002255 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002256 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002257 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002258 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002259 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002260 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002261 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002262 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002263 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002264 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002265 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002266 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002267 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002268 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002269 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002270 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002271 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002272 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002273 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002274 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002275 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002276 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002277 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002278 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002279 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002280 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002281 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002282 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002283 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002284 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002285 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002286 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002287 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002288 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002289 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002290 /* fall through */
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08002291 case 313:
2292 test_hash_speed("sha224", sec, generic_hash_speed_template);
2293 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002294 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002295 case 314:
2296 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2297 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002298 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002299 case 315:
2300 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2301 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002302 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002303 case 316:
2304 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2305 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002306 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002307 case 317:
2308 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2309 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002310 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08002311 case 318:
2312 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
2313 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002314 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07002315 case 319:
2316 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2317 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002318 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10002319 case 320:
2320 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2321 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002322 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02002323 case 321:
2324 test_hash_speed("poly1305", sec, poly1305_speed_template);
2325 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002326 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302327 case 322:
2328 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2329 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002330 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302331 case 323:
2332 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2333 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002334 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302335 case 324:
2336 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2337 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002338 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302339 case 325:
2340 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2341 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002342 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002343 case 326:
2344 test_hash_speed("sm3", sec, generic_hash_speed_template);
2345 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002346 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002347 case 399:
2348 break;
2349
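/*
 * Cases 400-499: asynchronous (ahash) hash speed tests, with the same
 * selection rules as the 3xx block; cases 422-425 use the multibuffer
 * helper with num_mb requests.
 */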
David S. Millerbeb63da2010-05-19 14:11:21 +10002350 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002351 if (alg) {
2352 test_ahash_speed(alg, sec, generic_hash_speed_template);
2353 break;
2354 }
David S. Millerbeb63da2010-05-19 14:11:21 +10002355 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002356 case 401:
2357 test_ahash_speed("md4", sec, generic_hash_speed_template);
2358 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002359 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002360 case 402:
2361 test_ahash_speed("md5", sec, generic_hash_speed_template);
2362 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002363 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002364 case 403:
2365 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2366 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002367 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002368 case 404:
2369 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2370 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002371 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002372 case 405:
2373 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2374 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002375 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002376 case 406:
2377 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2378 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002379 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002380 case 407:
2381 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2382 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002383 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002384 case 408:
2385 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2386 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002387 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002388 case 409:
2389 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2390 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002391 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002392 case 410:
2393 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2394 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002395 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002396 case 411:
2397 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2398 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002399 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002400 case 412:
2401 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2402 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002403 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002404 case 413:
2405 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2406 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002407 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002408 case 414:
2409 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2410 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002411 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002412 case 415:
2413 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2414 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002415 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002416 case 416:
2417 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2418 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002419 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002420 case 417:
2421 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2422 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002423 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302424 case 418:
2425 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2426 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002427 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302428 case 419:
2429 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2430 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002431 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302432 case 420:
2433 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2434 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002435 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302436 case 421:
2437 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2438 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002439 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002440 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002441 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2442 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002443 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002444 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002445 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002446 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2447 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002448 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002449 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07002450 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002451 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2452 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002453 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002454 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002455 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002456 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2457 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002458 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002459 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002460 case 499:
2461 break;
2462
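/* Cases 500-509: asynchronous skcipher speed tests. */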
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002463 case 500:
2464 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2465 speed_template_16_24_32);
2466 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2467 speed_template_16_24_32);
2468 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2469 speed_template_16_24_32);
2470 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2471 speed_template_16_24_32);
2472 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2473 speed_template_32_40_48);
2474 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2475 speed_template_32_40_48);
2476 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002477 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002478 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002479 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002480 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2481 speed_template_16_24_32);
2482 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2483 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002484 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2485 speed_template_16_24_32);
2486 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2487 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002488 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2489 speed_template_16_24_32);
2490 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2491 speed_template_16_24_32);
2492 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2493 speed_template_16_24_32);
2494 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2495 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002496 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2497 speed_template_20_28_36);
2498 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2499 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002500 break;
2501
2502 case 501:
2503 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2504 des3_speed_template, DES3_SPEED_VECTORS,
2505 speed_template_24);
2506 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2507 des3_speed_template, DES3_SPEED_VECTORS,
2508 speed_template_24);
2509 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2510 des3_speed_template, DES3_SPEED_VECTORS,
2511 speed_template_24);
2512 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2513 des3_speed_template, DES3_SPEED_VECTORS,
2514 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002515 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2516 des3_speed_template, DES3_SPEED_VECTORS,
2517 speed_template_24);
2518 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2519 des3_speed_template, DES3_SPEED_VECTORS,
2520 speed_template_24);
2521 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2522 des3_speed_template, DES3_SPEED_VECTORS,
2523 speed_template_24);
2524 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2525 des3_speed_template, DES3_SPEED_VECTORS,
2526 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002527 break;
2528
2529 case 502:
2530 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2531 speed_template_8);
2532 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2533 speed_template_8);
2534 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2535 speed_template_8);
2536 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2537 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002538 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2539 speed_template_8);
2540 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2541 speed_template_8);
2542 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2543 speed_template_8);
2544 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2545 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002546 break;
2547
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002548 case 503:
2549 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2550 speed_template_16_32);
2551 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2552 speed_template_16_32);
2553 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2554 speed_template_16_32);
2555 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2556 speed_template_16_32);
2557 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2558 speed_template_16_32);
2559 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2560 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002561 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2562 speed_template_32_48);
2563 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2564 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002565 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2566 speed_template_32_64);
2567 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2568 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002569 break;
2570
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002571 case 504:
2572 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2573 speed_template_16_24_32);
2574 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2575 speed_template_16_24_32);
2576 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2577 speed_template_16_24_32);
2578 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2579 speed_template_16_24_32);
2580 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2581 speed_template_16_24_32);
2582 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2583 speed_template_16_24_32);
2584 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2585 speed_template_32_40_48);
2586 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2587 speed_template_32_40_48);
2588 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2589 speed_template_32_48_64);
2590 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2591 speed_template_32_48_64);
2592 break;
2593
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002594 case 505:
2595 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2596 speed_template_8);
2597 break;
2598
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002599 case 506:
2600 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2601 speed_template_8_16);
2602 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2603 speed_template_8_16);
2604 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2605 speed_template_8_16);
2606 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2607 speed_template_8_16);
2608 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2609 speed_template_8_16);
2610 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2611 speed_template_8_16);
2612 break;
2613
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002614 case 507:
2615 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2616 speed_template_16_32);
2617 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2618 speed_template_16_32);
2619 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2620 speed_template_16_32);
2621 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2622 speed_template_16_32);
2623 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2624 speed_template_16_32);
2625 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2626 speed_template_16_32);
2627 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2628 speed_template_32_48);
2629 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2630 speed_template_32_48);
2631 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2632 speed_template_32_64);
2633 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2634 speed_template_32_64);
2635 break;
2636
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002637 case 508:
2638 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2639 speed_template_16_32);
2640 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2641 speed_template_16_32);
2642 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2643 speed_template_16_32);
2644 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2645 speed_template_16_32);
2646 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2647 speed_template_16_32);
2648 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2649 speed_template_16_32);
2650 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2651 speed_template_32_48);
2652 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2653 speed_template_32_48);
2654 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2655 speed_template_32_64);
2656 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2657 speed_template_32_64);
2658 break;
2659
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002660 case 509:
2661 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2662 speed_template_8_32);
2663 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2664 speed_template_8_32);
2665 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2666 speed_template_8_32);
2667 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2668 speed_template_8_32);
2669 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2670 speed_template_8_32);
2671 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2672 speed_template_8_32);
2673 break;
2674
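/* Cases 600-609: multibuffer skcipher speed tests, num_mb requests at a time. */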
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002675 case 600:
2676 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2677 speed_template_16_24_32, num_mb);
2678 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2679 speed_template_16_24_32, num_mb);
2680 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2681 speed_template_16_24_32, num_mb);
2682 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2683 speed_template_16_24_32, num_mb);
2684 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2685 speed_template_32_40_48, num_mb);
2686 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2687 speed_template_32_40_48, num_mb);
2688 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2689 speed_template_32_64, num_mb);
2690 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2691 speed_template_32_64, num_mb);
2692 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2693 speed_template_16_24_32, num_mb);
2694 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2695 speed_template_16_24_32, num_mb);
2696 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2697 speed_template_16_24_32, num_mb);
2698 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2699 speed_template_16_24_32, num_mb);
2700 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2701 speed_template_16_24_32, num_mb);
2702 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2703 speed_template_16_24_32, num_mb);
2704 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2705 speed_template_16_24_32, num_mb);
2706 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2707 speed_template_16_24_32, num_mb);
2708 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2709 0, speed_template_20_28_36, num_mb);
2710 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2711 0, speed_template_20_28_36, num_mb);
2712 break;
2713
2714 case 601:
2715 test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2716 des3_speed_template, DES3_SPEED_VECTORS,
2717 speed_template_24, num_mb);
2718 test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
2719 des3_speed_template, DES3_SPEED_VECTORS,
2720 speed_template_24, num_mb);
2721 test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2722 des3_speed_template, DES3_SPEED_VECTORS,
2723 speed_template_24, num_mb);
2724 test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
2725 des3_speed_template, DES3_SPEED_VECTORS,
2726 speed_template_24, num_mb);
2727 test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2728 des3_speed_template, DES3_SPEED_VECTORS,
2729 speed_template_24, num_mb);
2730 test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
2731 des3_speed_template, DES3_SPEED_VECTORS,
2732 speed_template_24, num_mb);
2733 test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2734 des3_speed_template, DES3_SPEED_VECTORS,
2735 speed_template_24, num_mb);
2736 test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
2737 des3_speed_template, DES3_SPEED_VECTORS,
2738 speed_template_24, num_mb);
2739 break;
2740
2741 case 602:
2742 test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2743 speed_template_8, num_mb);
2744 test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2745 speed_template_8, num_mb);
2746 test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2747 speed_template_8, num_mb);
2748 test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2749 speed_template_8, num_mb);
2750 test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2751 speed_template_8, num_mb);
2752 test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2753 speed_template_8, num_mb);
2754 test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2755 speed_template_8, num_mb);
2756 test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2757 speed_template_8, num_mb);
2758 break;
2759
2760 case 603:
2761 test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2762 speed_template_16_32, num_mb);
2763 test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2764 speed_template_16_32, num_mb);
2765 test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2766 speed_template_16_32, num_mb);
2767 test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2768 speed_template_16_32, num_mb);
2769 test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2770 speed_template_16_32, num_mb);
2771 test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2772 speed_template_16_32, num_mb);
2773 test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2774 speed_template_32_48, num_mb);
2775 test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2776 speed_template_32_48, num_mb);
2777 test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2778 speed_template_32_64, num_mb);
2779 test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2780 speed_template_32_64, num_mb);
2781 break;
2782
2783 case 604:
2784 test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2785 speed_template_16_24_32, num_mb);
2786 test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2787 speed_template_16_24_32, num_mb);
2788 test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2789 speed_template_16_24_32, num_mb);
2790 test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2791 speed_template_16_24_32, num_mb);
2792 test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2793 speed_template_16_24_32, num_mb);
2794 test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2795 speed_template_16_24_32, num_mb);
2796 test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2797 speed_template_32_40_48, num_mb);
2798 test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2799 speed_template_32_40_48, num_mb);
2800 test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2801 speed_template_32_48_64, num_mb);
2802 test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2803 speed_template_32_48_64, num_mb);
2804 break;
2805
	case 605:
		test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		break;

	case 606:
		test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		break;

	case 607:
		test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 608:
		test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 609:
		test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		break;

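	/*
	 * Case 1000: report which of the algorithms known to this module are
	 * currently available from the crypto API.
	 */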
	case 1000:
		test_available();
		break;
	}

	return ret;
}

static int __init tcrypt_mod_init(void)
{
	int err = -ENOMEM;
	int i;

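	/* Allocate TVMEMSIZE pages of scratch memory used as test buffers. */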
	for (i = 0; i < TVMEMSIZE; i++) {
		tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!tvmem[i])
			goto err_free_tv;
	}

	err = do_test(alg, type, mask, mode);

	if (err) {
		printk(KERN_ERR "tcrypt: one or more tests failed!\n");
		goto err_free_tv;
	} else {
		pr_debug("all tests passed\n");
	}

	/* We intentionally return -EAGAIN to prevent keeping the module,
	 * unless we're running in fips mode. It does all its work from
	 * init() and doesn't offer any runtime functionality, but in
	 * the fips case, checking for a successful load is helpful.
	 * => we don't need it in memory, do we?
	 * -- mludvig
	 */
	if (!fips_enabled)
		err = -EAGAIN;

err_free_tv:
	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
		free_page((unsigned long)tvmem[i]);

	return err;
}

/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);

module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
		      "(defaults to zero which uses CPU cycles instead)");
module_param(num_mb, uint, 0000);
MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
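
/*
 * Example usage (values are illustrative; mode numbers correspond to the
 * cases handled in do_test() above, e.g. 602 runs the multibuffer DES
 * speed tests):
 *
 *   modprobe tcrypt mode=602 sec=1 num_mb=8
 *
 * Outside fips mode the module intentionally fails to load with -EAGAIN
 * once the tests have run, so results are read from the kernel log.
 */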

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");