/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
	"streebog256", "streebog512",
	NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

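/*
 * Build an SG list for the AEAD speed tests: entry 0 holds the associated
 * data, the remaining entries spread buflen bytes of payload across the
 * xbuf pages (clamped to XBUFSIZE pages).
 */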
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

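/*
 * Multibuffer AEAD speed test: allocate num_mb requests with their own
 * buffers, then for each key size / block size combination measure how many
 * batches complete in 'secs' seconds (or in CPU cycles when secs is 0).
 * For decryption, one encryption pass is run first so the input carries a
 * valid authentication tag.
 */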
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;


	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

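/*
 * Single-request AEAD speed test over the aead_sizes[] block sizes, measured
 * in jiffies ('secs' != 0) or in CPU cycles.
 */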
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, *b_size);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

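/*
 * Multibuffer hash speed test: num_mb digest requests are fired concurrently
 * for each template entry whose blen equals plen.
 */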
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}


		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

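/*
 * Hash speed test over TVMEMSIZE pages; 'mask' selects between the async
 * variant (0, used by test_ahash_speed()) and the sync-only variant
 * (CRYPTO_ALG_ASYNC, used by test_hash_speed()) defined below.
 */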
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

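/*
 * Multibuffer skcipher speed test, mirroring test_mb_aead_speed() but driven
 * over block_sizes[] with in-place scatterlists.
 */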
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}


	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}


	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

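/*
 * Single-request skcipher speed test; 'async' selects whether asynchronous
 * implementations may be used (test_acipher_speed) or only synchronous ones
 * (test_cipher_speed).
 */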
Herbert Xu7166e582016-06-29 18:03:50 +08001494static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
1495 struct cipher_speed_template *template,
1496 unsigned int tcount, u8 *keysize, bool async)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001497{
Nicolas Royerde1975332012-07-01 19:19:47 +02001498 unsigned int ret, i, j, k, iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001499 struct crypto_wait wait;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001500 const char *key;
1501 char iv[128];
Herbert Xu7166e582016-06-29 18:03:50 +08001502 struct skcipher_request *req;
1503 struct crypto_skcipher *tfm;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001504 const char *e;
1505 u32 *b_size;
1506
1507 if (enc == ENCRYPT)
1508 e = "encryption";
1509 else
1510 e = "decryption";
1511
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001512 crypto_init_wait(&wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001513
Herbert Xu7166e582016-06-29 18:03:50 +08001514 tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001515
1516 if (IS_ERR(tfm)) {
1517 pr_err("failed to load transform for %s: %ld\n", algo,
1518 PTR_ERR(tfm));
1519 return;
1520 }
1521
Luca Clementi263a8df2014-06-25 22:57:42 -07001522 pr_info("\ntesting speed of async %s (%s) %s\n", algo,
Herbert Xu7166e582016-06-29 18:03:50 +08001523 get_driver_name(crypto_skcipher, tfm), e);
Luca Clementi263a8df2014-06-25 22:57:42 -07001524
Herbert Xu7166e582016-06-29 18:03:50 +08001525 req = skcipher_request_alloc(tfm, GFP_KERNEL);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001526 if (!req) {
1527 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
1528 algo);
1529 goto out;
1530 }
1531
Herbert Xu7166e582016-06-29 18:03:50 +08001532 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001533 crypto_req_done, &wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001534
1535 i = 0;
1536 do {
1537 b_size = block_sizes;
1538
1539 do {
1540 struct scatterlist sg[TVMEMSIZE];
1541
1542 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
1543 pr_err("template (%u) too big for "
1544 "tvmem (%lu)\n", *keysize + *b_size,
1545 TVMEMSIZE * PAGE_SIZE);
1546 goto out_free_req;
1547 }
1548
1549 pr_info("test %u (%d bit key, %d byte blocks): ", i,
1550 *keysize * 8, *b_size);
1551
1552 memset(tvmem[0], 0xff, PAGE_SIZE);
1553
1554 /* set key, plain text and IV */
1555 key = tvmem[0];
1556 for (j = 0; j < tcount; j++) {
1557 if (template[j].klen == *keysize) {
1558 key = template[j].key;
1559 break;
1560 }
1561 }
1562
Herbert Xu7166e582016-06-29 18:03:50 +08001563 crypto_skcipher_clear_flags(tfm, ~0);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001564
Herbert Xu7166e582016-06-29 18:03:50 +08001565 ret = crypto_skcipher_setkey(tfm, key, *keysize);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001566 if (ret) {
1567 pr_err("setkey() failed flags=%x\n",
Herbert Xu7166e582016-06-29 18:03:50 +08001568 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001569 goto out_free_req;
1570 }
1571
Nicolas Royerde1975332012-07-01 19:19:47 +02001572 k = *keysize + *b_size;
Horia Geant?007ee8d2015-03-09 16:14:58 +02001573 sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
1574
Nicolas Royerde1975332012-07-01 19:19:47 +02001575 if (k > PAGE_SIZE) {
1576 sg_set_buf(sg, tvmem[0] + *keysize,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001577 PAGE_SIZE - *keysize);
Nicolas Royerde1975332012-07-01 19:19:47 +02001578 k -= PAGE_SIZE;
1579 j = 1;
1580 while (k > PAGE_SIZE) {
1581 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
1582 memset(tvmem[j], 0xff, PAGE_SIZE);
1583 j++;
1584 k -= PAGE_SIZE;
1585 }
1586 sg_set_buf(sg + j, tvmem[j], k);
1587 memset(tvmem[j], 0xff, k);
1588 } else {
1589 sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001590 }
1591
Herbert Xu7166e582016-06-29 18:03:50 +08001592 iv_len = crypto_skcipher_ivsize(tfm);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001593 if (iv_len)
1594 memset(&iv, 0xff, iv_len);
1595
Herbert Xu7166e582016-06-29 18:03:50 +08001596 skcipher_request_set_crypt(req, sg, sg, *b_size, iv);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001597
Horia Geantă2af63292018-07-23 17:18:48 +03001598 if (secs) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001599 ret = test_acipher_jiffies(req, enc,
Mark Rustad3e3dc252014-07-25 02:53:38 -07001600 *b_size, secs);
Horia Geantă2af63292018-07-23 17:18:48 +03001601 cond_resched();
1602 } else {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001603 ret = test_acipher_cycles(req, enc,
1604 *b_size);
Horia Geantă2af63292018-07-23 17:18:48 +03001605 }
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001606
1607 if (ret) {
1608 pr_err("%s() failed flags=%x\n", e,
Herbert Xu7166e582016-06-29 18:03:50 +08001609 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001610 break;
1611 }
1612 b_size++;
1613 i++;
1614 } while (*b_size);
1615 keysize++;
1616 } while (*keysize);
1617
1618out_free_req:
Herbert Xu7166e582016-06-29 18:03:50 +08001619 skcipher_request_free(req);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001620out:
Herbert Xu7166e582016-06-29 18:03:50 +08001621 crypto_free_skcipher(tfm);
1622}
1623
1624static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
1625 struct cipher_speed_template *template,
1626 unsigned int tcount, u8 *keysize)
1627{
1628 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1629 true);
1630}
1631
1632static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
1633 struct cipher_speed_template *template,
1634 unsigned int tcount, u8 *keysize)
1635{
1636 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1637 false);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001638}
1639
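/*
 * Print which of the algorithms listed in check[] are currently available
 * from the crypto API.
 */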
Herbert Xuef2736f2005-06-22 13:26:03 -07001640static void test_available(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07001641{
1642 char **name = check;
Herbert Xuef2736f2005-06-22 13:26:03 -07001643
Linus Torvalds1da177e2005-04-16 15:20:36 -07001644 while (*name) {
1645 printk("alg %s ", *name);
Herbert Xu6158efc2007-04-04 17:41:07 +10001646 printk(crypto_has_alg(*name, 0, 0) ?
Herbert Xue4d5b792006-08-26 18:12:40 +10001647 "found\n" : "not found\n");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001648 name++;
Herbert Xuef2736f2005-06-22 13:26:03 -07001649 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07001650}
1651
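/*
 * Run the self-test for a single algorithm.  In FIPS mode, non-approved
 * algorithms report -EINVAL; that is expected and not counted as a failure.
 */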
Herbert Xu01b32322008-07-31 15:41:55 +08001652static inline int tcrypt_test(const char *alg)
1653{
Jarod Wilson4e033a62009-05-27 15:10:21 +10001654 int ret;
1655
Rabin Vincent76512f22017-01-18 14:54:05 +01001656 pr_debug("testing %s\n", alg);
1657
Jarod Wilson4e033a62009-05-27 15:10:21 +10001658 ret = alg_test(alg, alg, 0, 0);
1659 /* non-fips algs return -EINVAL in fips mode */
1660 if (fips_enabled && ret == -EINVAL)
1661 ret = 0;
1662 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +08001663}
1664
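/*
 * Dispatch one test mode.  Mode 0 walks all correctness self-tests (1-199);
 * the individual low modes test specific algorithm families, while the
 * 200-299, 300-399, 400-499, 500-599 and 600+ ranges are the cipher/AEAD,
 * hash, ahash, async skcipher and multi-buffer speed tests below.  The
 * module is normally exercised by loading it with the mode/sec parameters,
 * e.g. (illustrative invocation only):
 *
 *	modprobe tcrypt mode=200 sec=1
 */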
Kees Cook4e234ee2018-04-26 19:57:28 -07001665static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001666{
1667 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001668 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001669
1670 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001671 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001672 if (alg) {
1673 if (!crypto_has_alg(alg, type,
1674 mask ?: CRYPTO_ALG_TYPE_MASK))
1675 ret = -ENOENT;
1676 break;
1677 }
1678
Herbert Xu01b32322008-07-31 15:41:55 +08001679 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001680 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001681 break;
1682
1683 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001684 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001685 break;
1686
1687 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001688 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001689 break;
1690
1691 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001692 ret += tcrypt_test("ecb(des)");
1693 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001694 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001695 break;
1696
1697 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001698 ret += tcrypt_test("ecb(des3_ede)");
1699 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001700 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001701 break;
1702
1703 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001704 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001705 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001706
Linus Torvalds1da177e2005-04-16 15:20:36 -07001707 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001708 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001709 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001710
Linus Torvalds1da177e2005-04-16 15:20:36 -07001711 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001712 ret += tcrypt_test("ecb(blowfish)");
1713 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001714 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001715 break;
1716
1717 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001718 ret += tcrypt_test("ecb(twofish)");
1719 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001720 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001721 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001722 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001723 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001724
Linus Torvalds1da177e2005-04-16 15:20:36 -07001725 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001726 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001727 ret += tcrypt_test("cbc(serpent)");
1728 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001729 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001730 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001731 break;
1732
1733 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001734 ret += tcrypt_test("ecb(aes)");
1735 ret += tcrypt_test("cbc(aes)");
1736 ret += tcrypt_test("lrw(aes)");
1737 ret += tcrypt_test("xts(aes)");
1738 ret += tcrypt_test("ctr(aes)");
1739 ret += tcrypt_test("rfc3686(ctr(aes))");
Gilad Ben-Yossefdfb89ab2018-09-20 14:18:40 +01001740 ret += tcrypt_test("ofb(aes)");
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03001741 ret += tcrypt_test("cfb(aes)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001742 break;
1743
1744 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001745 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001746 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001747
Linus Torvalds1da177e2005-04-16 15:20:36 -07001748 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001749 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001750 break;
1751
1752 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001753 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001754 break;
1755
1756 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001757 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001758 ret += tcrypt_test("cbc(cast5)");
1759 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001760 break;
1761
1762 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001763 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001764 ret += tcrypt_test("cbc(cast6)");
1765 ret += tcrypt_test("ctr(cast6)");
1766 ret += tcrypt_test("lrw(cast6)");
1767 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001768 break;
1769
1770 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001771 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001772 break;
1773
1774 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001775 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001776 break;
1777
1778 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001779 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001780 break;
1781
1782 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001783 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001784 break;
1785
1786 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001787 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001788 break;
1789
1790 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001791 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001792 break;
1793
1794 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001795 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001796 break;
1797
1798 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001799 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001800 break;
1801
1802 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001803 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001804 break;
1805
1806 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001807 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001808 break;
1809
1810 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001811 ret += tcrypt_test("ecb(anubis)");
1812 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001813 break;
1814
1815 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001816 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001817 break;
1818
1819 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001820 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001821 break;
1822
1823 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001824 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001825 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001826
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001827 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001828 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001829 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001830
David Howells90831632006-12-16 12:13:14 +11001831 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001832 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001833 break;
1834
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001835 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001836 ret += tcrypt_test("ecb(camellia)");
1837 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001838 ret += tcrypt_test("ctr(camellia)");
1839 ret += tcrypt_test("lrw(camellia)");
1840 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001841 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001842
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001843 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001844 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001845 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001846
Tan Swee Heng2407d602007-11-23 19:45:00 +08001847 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001848 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001849 break;
1850
Herbert Xu8df213d2007-12-02 14:55:47 +11001851 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001852 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001853 break;
1854
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001855 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001856 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001857 break;
1858
Joy Latten93cc74e2007-12-12 20:24:22 +08001859 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001860 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001861 break;
1862
Kevin Coffman76cb9522008-03-24 21:26:16 +08001863 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001864 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001865 break;
1866
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001867 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001868 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001869 break;
1870
1871 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001872 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001873 break;
1874
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001875 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001876 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001877 break;
1878
1879 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001880 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001881 break;
1882
1883 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001884 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001885 break;
1886
Jarod Wilson5d667322009-05-04 19:23:40 +08001887 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001888 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001889 break;
1890
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001891 case 46:
1892 ret += tcrypt_test("ghash");
1893 break;
1894
Herbert Xu684115212013-09-07 12:56:26 +10001895 case 47:
1896 ret += tcrypt_test("crct10dif");
1897 break;
1898
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301899 case 48:
1900 ret += tcrypt_test("sha3-224");
1901 break;
1902
1903 case 49:
1904 ret += tcrypt_test("sha3-256");
1905 break;
1906
1907 case 50:
1908 ret += tcrypt_test("sha3-384");
1909 break;
1910
1911 case 51:
1912 ret += tcrypt_test("sha3-512");
1913 break;
1914
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001915 case 52:
1916 ret += tcrypt_test("sm3");
1917 break;
1918
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001919 case 53:
1920 ret += tcrypt_test("streebog256");
1921 break;
1922
1923 case 54:
1924 ret += tcrypt_test("streebog512");
1925 break;
1926
Linus Torvalds1da177e2005-04-16 15:20:36 -07001927 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001928 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001929 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001930
Linus Torvalds1da177e2005-04-16 15:20:36 -07001931 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001932 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001933 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001934
Linus Torvalds1da177e2005-04-16 15:20:36 -07001935 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001936 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001937 break;
1938
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001939 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001940 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001941 break;
1942
1943 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001944 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001945 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001946
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001947 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001948 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001949 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001950
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001951 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001952 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001953 break;
1954
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001955 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001956 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001957 break;
1958
1959 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001960 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001961 break;
1962
Shane Wangf1939f72009-09-02 20:05:22 +10001963 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001964 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001965 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001966
raveendra padasalagi98eca722016-07-01 11:16:54 +05301967 case 111:
1968 ret += tcrypt_test("hmac(sha3-224)");
1969 break;
1970
1971 case 112:
1972 ret += tcrypt_test("hmac(sha3-256)");
1973 break;
1974
1975 case 113:
1976 ret += tcrypt_test("hmac(sha3-384)");
1977 break;
1978
1979 case 114:
1980 ret += tcrypt_test("hmac(sha3-512)");
1981 break;
1982
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001983 case 115:
1984 ret += tcrypt_test("hmac(streebog256)");
1985 break;
1986
1987 case 116:
1988 ret += tcrypt_test("hmac(streebog512)");
1989 break;
1990
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001991 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001992 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001993 break;
1994
Adrian Hoban69435b92010-11-04 15:02:04 -04001995 case 151:
1996 ret += tcrypt_test("rfc4106(gcm(aes))");
1997 break;
1998
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001999 case 152:
2000 ret += tcrypt_test("rfc4543(gcm(aes))");
2001 break;
2002
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002003 case 153:
2004 ret += tcrypt_test("cmac(aes)");
2005 break;
2006
2007 case 154:
2008 ret += tcrypt_test("cmac(des3_ede)");
2009 break;
2010
Horia Geantabbf9c892013-11-28 15:11:16 +02002011 case 155:
2012 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
2013 break;
2014
Horia Geantabca4feb2014-03-14 17:46:51 +02002015 case 156:
2016 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
2017 break;
2018
2019 case 157:
2020 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2021 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05302022 case 181:
2023 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2024 break;
2025 case 182:
2026 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2027 break;
2028 case 183:
2029 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2030 break;
2031 case 184:
2032 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2033 break;
2034 case 185:
2035 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2036 break;
2037 case 186:
2038 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2039 break;
2040 case 187:
2041 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2042 break;
2043 case 188:
2044 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2045 break;
2046 case 189:
2047 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2048 break;
2049 case 190:
2050 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2051 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002052 case 191:
2053 ret += tcrypt_test("ecb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002054 ret += tcrypt_test("cbc(sm4)");
2055 ret += tcrypt_test("ctr(sm4)");
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002056 break;
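	/*
	 * Modes 200 and up are timed speed tests rather than correctness
	 * tests: the 2xx range covers block cipher and AEAD throughput.
	 */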
Harald Welteebfd9bc2005-06-22 13:27:23 -07002057 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002058 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002059 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002060 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002061 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002062 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002063 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002064 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002065 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002066 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002067 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002068 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002069 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002070 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002071 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002072 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002073 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002074 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2075 speed_template_16_24_32);
2076 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2077 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002078 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2079 speed_template_16_24_32);
2080 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2081 speed_template_16_24_32);
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03002082 test_cipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2083 speed_template_16_24_32);
2084 test_cipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2085 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002086 break;
2087
2088 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002089 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002090 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002091 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002092 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002093 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002094 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002095 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002096 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002097 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002098 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002099 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002100 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002101 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2102 des3_speed_template, DES3_SPEED_VECTORS,
2103 speed_template_24);
2104 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2105 des3_speed_template, DES3_SPEED_VECTORS,
2106 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002107 break;
2108
2109 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002110 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002111 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002112 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002113 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002114 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002115 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002116 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002117 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002118 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2119 speed_template_16_24_32);
2120 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2121 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002122 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2123 speed_template_32_40_48);
2124 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2125 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002126 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2127 speed_template_32_48_64);
2128 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2129 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002130 break;
2131
2132 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002133 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002134 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002135 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002136 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002137 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002138 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002139 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002140 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002141 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2142 speed_template_8_32);
2143 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2144 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002145 break;
2146
2147 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002148 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002149 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002150 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002151 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002152 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002153 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002154 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002155 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002156 break;
2157
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002158 case 205:
2159 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002160 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002161 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002162 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002163 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002164 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002165 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002166 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002167 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2168 speed_template_16_24_32);
2169 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2170 speed_template_16_24_32);
2171 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2172 speed_template_32_40_48);
2173 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2174 speed_template_32_40_48);
2175 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2176 speed_template_32_48_64);
2177 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2178 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002179 break;
2180
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002181 case 206:
2182 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002183 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002184 break;
2185
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002186 case 207:
2187 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2188 speed_template_16_32);
2189 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2190 speed_template_16_32);
2191 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2192 speed_template_16_32);
2193 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2194 speed_template_16_32);
2195 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2196 speed_template_16_32);
2197 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2198 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002199 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2200 speed_template_32_48);
2201 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2202 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002203 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2204 speed_template_32_64);
2205 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2206 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002207 break;
2208
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002209 case 208:
2210 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2211 speed_template_8);
2212 break;
2213
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002214 case 209:
2215 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2216 speed_template_8_16);
2217 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2218 speed_template_8_16);
2219 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2220 speed_template_8_16);
2221 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2222 speed_template_8_16);
2223 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2224 speed_template_8_16);
2225 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2226 speed_template_8_16);
2227 break;
2228
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002229 case 210:
2230 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2231 speed_template_16_32);
2232 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2233 speed_template_16_32);
2234 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2235 speed_template_16_32);
2236 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2237 speed_template_16_32);
2238 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2239 speed_template_16_32);
2240 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2241 speed_template_16_32);
2242 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2243 speed_template_32_48);
2244 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2245 speed_template_32_48);
2246 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2247 speed_template_32_64);
2248 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2249 speed_template_32_64);
2250 break;
2251
Tim Chen53f52d72013-12-11 14:28:47 -08002252 case 211:
2253 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002254 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302255 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002256 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002257 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2258 NULL, 0, 16, 16, aead_speed_template_20);
2259 test_aead_speed("gcm(aes)", DECRYPT, sec,
2260 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002261 break;
2262
Herbert Xu4e4aab62015-06-17 14:04:21 +08002263 case 212:
2264 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002265 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002266 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2267 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002268 break;
2269
Martin Willi2dce0632015-07-16 19:13:59 +02002270 case 213:
2271 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2272 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002273 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2274 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002275 break;
2276
2277 case 214:
2278 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2279 speed_template_32);
2280 break;
2281
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002282 case 215:
2283 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2284 0, 16, 16, aead_speed_template_20, num_mb);
2285 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2286 speed_template_16_24_32, num_mb);
2287 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2288 0, 16, 16, aead_speed_template_20, num_mb);
2289 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2290 speed_template_16_24_32, num_mb);
2291 break;
2292
2293 case 216:
2294 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2295 16, 16, aead_speed_template_19, num_mb);
2296 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2297 16, 16, aead_speed_template_19, num_mb);
2298 break;
2299
2300 case 217:
2301 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2302 sec, NULL, 0, 16, 8, aead_speed_template_36,
2303 num_mb);
2304 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2305 sec, NULL, 0, 16, 8, aead_speed_template_36,
2306 num_mb);
2307 break;
2308
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002309 case 218:
2310 test_cipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2311 speed_template_16);
2312 test_cipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2313 speed_template_16);
2314 test_cipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2315 speed_template_16);
2316 test_cipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2317 speed_template_16);
2318 test_cipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2319 speed_template_16);
2320 test_cipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2321 speed_template_16);
2322 break;
Eric Biggers059c2a42018-11-16 17:26:31 -08002323
2324 case 219:
2325 test_cipher_speed("adiantum(xchacha12,aes)", ENCRYPT, sec, NULL,
2326 0, speed_template_32);
2327 test_cipher_speed("adiantum(xchacha12,aes)", DECRYPT, sec, NULL,
2328 0, speed_template_32);
2329 test_cipher_speed("adiantum(xchacha20,aes)", ENCRYPT, sec, NULL,
2330 0, speed_template_32);
2331 test_cipher_speed("adiantum(xchacha20,aes)", DECRYPT, sec, NULL,
2332 0, speed_template_32);
2333 break;
2334
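	/*
	 * 300-399: hash speed tests.  The cases intentionally fall through:
	 * mode 300 (without alg=) times every algorithm in the chain, while
	 * a specific mode such as 303 times one algorithm and then stops at
	 * the "mode > 300 && mode < 400" check.
	 */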
Michal Ludvige8057922006-05-30 22:04:19 +10002335 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002336 if (alg) {
2337 test_hash_speed(alg, sec, generic_hash_speed_template);
2338 break;
2339 }
Michal Ludvige8057922006-05-30 22:04:19 +10002340 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002341 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002342 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002343 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002344 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002345 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002346 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002347 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002348 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002349 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002350 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002351 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002352 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002353 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002354 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002355 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002356 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002357 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002358 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002359 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002360 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002361 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002362 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002363 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002364 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002365 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002366 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002367 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002368 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002369 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002370 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002371 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002372 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002373 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002374 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002375 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002376 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002377 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002378 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002379 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002380 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002381 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002382 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002383 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002384 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002385 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002386 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002387 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002388 /* fall through */
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08002389 case 313:
2390 test_hash_speed("sha224", sec, generic_hash_speed_template);
2391 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002392 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002393 case 314:
2394 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2395 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002396 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002397 case 315:
2398 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2399 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002400 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002401 case 316:
2402 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2403 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002404 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002405 case 317:
2406 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2407 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002408 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08002409 case 318:
2410 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
2411 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002412 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07002413 case 319:
2414 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2415 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002416 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10002417 case 320:
2418 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2419 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002420 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02002421 case 321:
2422 test_hash_speed("poly1305", sec, poly1305_speed_template);
2423 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002424 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302425 case 322:
2426 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2427 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002428 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302429 case 323:
2430 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2431 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002432 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302433 case 324:
2434 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2435 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002436 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302437 case 325:
2438 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2439 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002440 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002441 case 326:
2442 test_hash_speed("sm3", sec, generic_hash_speed_template);
2443 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002444 /* fall through */
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002445 case 327:
2446 test_hash_speed("streebog256", sec,
2447 generic_hash_speed_template);
2448 if (mode > 300 && mode < 400) break;
2449 /* fall through */
2450 case 328:
2451 test_hash_speed("streebog512", sec,
2452 generic_hash_speed_template);
2453 if (mode > 300 && mode < 400) break;
2454 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002455 case 399:
2456 break;
2457
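	/*
	 * 400-499: the same fall-through pattern for the asynchronous hash
	 * (ahash) interface, including the multi-buffer variants from 422 on.
	 */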
David S. Millerbeb63da2010-05-19 14:11:21 +10002458 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002459 if (alg) {
2460 test_ahash_speed(alg, sec, generic_hash_speed_template);
2461 break;
2462 }
David S. Millerbeb63da2010-05-19 14:11:21 +10002463 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002464 case 401:
2465 test_ahash_speed("md4", sec, generic_hash_speed_template);
2466 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002467 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002468 case 402:
2469 test_ahash_speed("md5", sec, generic_hash_speed_template);
2470 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002471 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002472 case 403:
2473 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2474 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002475 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002476 case 404:
2477 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2478 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002479 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002480 case 405:
2481 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2482 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002483 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002484 case 406:
2485 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2486 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002487 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002488 case 407:
2489 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2490 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002491 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002492 case 408:
2493 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2494 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002495 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002496 case 409:
2497 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2498 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002499 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002500 case 410:
2501 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2502 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002503 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002504 case 411:
2505 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2506 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002507 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002508 case 412:
2509 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2510 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002511 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002512 case 413:
2513 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2514 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002515 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002516 case 414:
2517 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2518 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002519 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002520 case 415:
2521 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2522 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002523 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002524 case 416:
2525 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2526 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002527 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002528 case 417:
2529 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2530 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002531 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302532 case 418:
2533 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2534 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002535 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302536 case 419:
2537 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2538 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002539 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302540 case 420:
2541 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2542 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002543 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302544 case 421:
2545 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2546 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002547 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002548 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002549 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2550 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002551 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002552 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002553 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002554 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2555 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002556 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002557 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07002558 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002559 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2560 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002561 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002562 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002563 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002564 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2565 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002566 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002567 /* fall through */
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002568 case 426:
2569 test_mb_ahash_speed("streebog256", sec,
2570 generic_hash_speed_template, num_mb);
2571 if (mode > 400 && mode < 500) break;
2572 /* fall through */
2573 case 427:
2574 test_mb_ahash_speed("streebog512", sec,
2575 generic_hash_speed_template, num_mb);
2576 if (mode > 400 && mode < 500) break;
2577 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002578 case 499:
2579 break;
2580
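	/*
	 * 500-509: skcipher speed tests through the asynchronous
	 * test_acipher_speed() wrapper.
	 */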
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002581 case 500:
2582 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2583 speed_template_16_24_32);
2584 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2585 speed_template_16_24_32);
2586 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2587 speed_template_16_24_32);
2588 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2589 speed_template_16_24_32);
2590 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2591 speed_template_32_40_48);
2592 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2593 speed_template_32_40_48);
2594 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002595 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002596 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002597 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002598 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2599 speed_template_16_24_32);
2600 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2601 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002602 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2603 speed_template_16_24_32);
2604 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2605 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002606 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2607 speed_template_16_24_32);
2608 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2609 speed_template_16_24_32);
2610 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2611 speed_template_16_24_32);
2612 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2613 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002614 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2615 speed_template_20_28_36);
2616 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2617 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002618 break;
2619
2620 case 501:
2621 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2622 des3_speed_template, DES3_SPEED_VECTORS,
2623 speed_template_24);
2624 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2625 des3_speed_template, DES3_SPEED_VECTORS,
2626 speed_template_24);
2627 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2628 des3_speed_template, DES3_SPEED_VECTORS,
2629 speed_template_24);
2630 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2631 des3_speed_template, DES3_SPEED_VECTORS,
2632 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002633 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2634 des3_speed_template, DES3_SPEED_VECTORS,
2635 speed_template_24);
2636 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2637 des3_speed_template, DES3_SPEED_VECTORS,
2638 speed_template_24);
2639 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2640 des3_speed_template, DES3_SPEED_VECTORS,
2641 speed_template_24);
2642 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2643 des3_speed_template, DES3_SPEED_VECTORS,
2644 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002645 break;
2646
2647 case 502:
2648 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2649 speed_template_8);
2650 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2651 speed_template_8);
2652 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2653 speed_template_8);
2654 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2655 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002656 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2657 speed_template_8);
2658 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2659 speed_template_8);
2660 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2661 speed_template_8);
2662 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2663 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002664 break;
2665
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002666 case 503:
2667 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2668 speed_template_16_32);
2669 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2670 speed_template_16_32);
2671 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2672 speed_template_16_32);
2673 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2674 speed_template_16_32);
2675 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2676 speed_template_16_32);
2677 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2678 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002679 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2680 speed_template_32_48);
2681 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2682 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002683 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2684 speed_template_32_64);
2685 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2686 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002687 break;
2688
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002689 case 504:
2690 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2691 speed_template_16_24_32);
2692 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2693 speed_template_16_24_32);
2694 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2695 speed_template_16_24_32);
2696 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2697 speed_template_16_24_32);
2698 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2699 speed_template_16_24_32);
2700 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2701 speed_template_16_24_32);
2702 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2703 speed_template_32_40_48);
2704 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2705 speed_template_32_40_48);
2706 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2707 speed_template_32_48_64);
2708 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2709 speed_template_32_48_64);
2710 break;
2711
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002712 case 505:
2713 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2714 speed_template_8);
2715 break;
2716
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002717 case 506:
2718 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2719 speed_template_8_16);
2720 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2721 speed_template_8_16);
2722 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2723 speed_template_8_16);
2724 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2725 speed_template_8_16);
2726 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2727 speed_template_8_16);
2728 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2729 speed_template_8_16);
2730 break;
2731
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002732 case 507:
2733 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2734 speed_template_16_32);
2735 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2736 speed_template_16_32);
2737 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2738 speed_template_16_32);
2739 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2740 speed_template_16_32);
2741 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2742 speed_template_16_32);
2743 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2744 speed_template_16_32);
2745 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2746 speed_template_32_48);
2747 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2748 speed_template_32_48);
2749 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2750 speed_template_32_64);
2751 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2752 speed_template_32_64);
2753 break;
2754
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002755 case 508:
2756 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2757 speed_template_16_32);
2758 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2759 speed_template_16_32);
2760 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2761 speed_template_16_32);
2762 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2763 speed_template_16_32);
2764 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2765 speed_template_16_32);
2766 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2767 speed_template_16_32);
2768 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2769 speed_template_32_48);
2770 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2771 speed_template_32_48);
2772 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2773 speed_template_32_64);
2774 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2775 speed_template_32_64);
2776 break;
2777
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002778 case 509:
2779 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2780 speed_template_8_32);
2781 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2782 speed_template_8_32);
2783 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2784 speed_template_8_32);
2785 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2786 speed_template_8_32);
2787 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2788 speed_template_8_32);
2789 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2790 speed_template_8_32);
2791 break;
2792
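	/*
	 * A brief orientation note (added comment): modes 600-609 below are
	 * the multi-buffer skcipher speed tests; each one uses num_mb
	 * concurrent requests, where num_mb is the module parameter
	 * described near the end of this file (default 8).  An illustrative
	 * invocation, assuming tcrypt is built as a module, might be:
	 *
	 *   modprobe tcrypt mode=600 sec=1 num_mb=8
	 */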
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002793 case 600:
2794 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2795 speed_template_16_24_32, num_mb);
2796 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2797 speed_template_16_24_32, num_mb);
2798 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2799 speed_template_16_24_32, num_mb);
2800 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2801 speed_template_16_24_32, num_mb);
2802 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2803 speed_template_32_40_48, num_mb);
2804 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2805 speed_template_32_40_48, num_mb);
2806 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2807 speed_template_32_64, num_mb);
2808 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2809 speed_template_32_64, num_mb);
2810 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2811 speed_template_16_24_32, num_mb);
2812 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2813 speed_template_16_24_32, num_mb);
2814 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2815 speed_template_16_24_32, num_mb);
2816 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2817 speed_template_16_24_32, num_mb);
2818 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2819 speed_template_16_24_32, num_mb);
2820 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2821 speed_template_16_24_32, num_mb);
2822 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2823 speed_template_16_24_32, num_mb);
2824 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2825 speed_template_16_24_32, num_mb);
2826 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2827 0, speed_template_20_28_36, num_mb);
2828 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2829 0, speed_template_20_28_36, num_mb);
2830 break;
2831
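	/*
	 * Note (added comment): unlike the other 6xx cases, the des3_ede
	 * tests below pass an explicit key template (des3_speed_template
	 * with DES3_SPEED_VECTORS entries) instead of NULL, presumably so
	 * that setkey() is handed a key the des3_ede implementation will
	 * accept rather than a generated one it might reject.
	 */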
2832 case 601:
2833 test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2834 des3_speed_template, DES3_SPEED_VECTORS,
2835 speed_template_24, num_mb);
2836 test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
2837 des3_speed_template, DES3_SPEED_VECTORS,
2838 speed_template_24, num_mb);
2839 test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2840 des3_speed_template, DES3_SPEED_VECTORS,
2841 speed_template_24, num_mb);
2842 test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
2843 des3_speed_template, DES3_SPEED_VECTORS,
2844 speed_template_24, num_mb);
2845 test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2846 des3_speed_template, DES3_SPEED_VECTORS,
2847 speed_template_24, num_mb);
2848 test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
2849 des3_speed_template, DES3_SPEED_VECTORS,
2850 speed_template_24, num_mb);
2851 test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2852 des3_speed_template, DES3_SPEED_VECTORS,
2853 speed_template_24, num_mb);
2854 test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
2855 des3_speed_template, DES3_SPEED_VECTORS,
2856 speed_template_24, num_mb);
2857 break;
2858
2859 case 602:
2860 test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2861 speed_template_8, num_mb);
2862 test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2863 speed_template_8, num_mb);
2864 test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2865 speed_template_8, num_mb);
2866 test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2867 speed_template_8, num_mb);
2868 test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2869 speed_template_8, num_mb);
2870 test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2871 speed_template_8, num_mb);
2872 test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2873 speed_template_8, num_mb);
2874 test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2875 speed_template_8, num_mb);
2876 break;
2877
2878 case 603:
2879 test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2880 speed_template_16_32, num_mb);
2881 test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2882 speed_template_16_32, num_mb);
2883 test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2884 speed_template_16_32, num_mb);
2885 test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2886 speed_template_16_32, num_mb);
2887 test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2888 speed_template_16_32, num_mb);
2889 test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2890 speed_template_16_32, num_mb);
2891 test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2892 speed_template_32_48, num_mb);
2893 test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2894 speed_template_32_48, num_mb);
2895 test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2896 speed_template_32_64, num_mb);
2897 test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2898 speed_template_32_64, num_mb);
2899 break;
2900
2901 case 604:
2902 test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2903 speed_template_16_24_32, num_mb);
2904 test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2905 speed_template_16_24_32, num_mb);
2906 test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2907 speed_template_16_24_32, num_mb);
2908 test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2909 speed_template_16_24_32, num_mb);
2910 test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2911 speed_template_16_24_32, num_mb);
2912 test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2913 speed_template_16_24_32, num_mb);
2914 test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2915 speed_template_32_40_48, num_mb);
2916 test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2917 speed_template_32_40_48, num_mb);
2918 test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2919 speed_template_32_48_64, num_mb);
2920 test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2921 speed_template_32_48_64, num_mb);
2922 break;
2923
2924 case 605:
2925 test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2926 speed_template_8, num_mb);
2927 break;
2928
2929 case 606:
2930 test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2931 speed_template_8_16, num_mb);
2932 test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2933 speed_template_8_16, num_mb);
2934 test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2935 speed_template_8_16, num_mb);
2936 test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2937 speed_template_8_16, num_mb);
2938 test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2939 speed_template_8_16, num_mb);
2940 test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2941 speed_template_8_16, num_mb);
2942 break;
2943
2944 case 607:
2945 test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2946 speed_template_16_32, num_mb);
2947 test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2948 speed_template_16_32, num_mb);
2949 test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2950 speed_template_16_32, num_mb);
2951 test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2952 speed_template_16_32, num_mb);
2953 test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2954 speed_template_16_32, num_mb);
2955 test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2956 speed_template_16_32, num_mb);
2957 test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2958 speed_template_32_48, num_mb);
2959 test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2960 speed_template_32_48, num_mb);
2961 test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2962 speed_template_32_64, num_mb);
2963 test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2964 speed_template_32_64, num_mb);
2965 break;
2966
2967 case 608:
2968 test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2969 speed_template_16_32, num_mb);
2970 test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2971 speed_template_16_32, num_mb);
2972 test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2973 speed_template_16_32, num_mb);
2974 test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2975 speed_template_16_32, num_mb);
2976 test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2977 speed_template_16_32, num_mb);
2978 test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2979 speed_template_16_32, num_mb);
2980 test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2981 speed_template_32_48, num_mb);
2982 test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2983 speed_template_32_48, num_mb);
2984 test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2985 speed_template_32_64, num_mb);
2986 test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2987 speed_template_32_64, num_mb);
2988 break;
2989
2990 case 609:
2991 test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2992 speed_template_8_32, num_mb);
2993 test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2994 speed_template_8_32, num_mb);
2995 test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2996 speed_template_8_32, num_mb);
2997 test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2998 speed_template_8_32, num_mb);
2999 test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
3000 speed_template_8_32, num_mb);
3001 test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
3002 speed_template_8_32, num_mb);
3003 break;
3004
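	/*
	 * Note (added comment): mode 1000 is not a performance test.  As
	 * used here, test_available() simply reports which of the algorithm
	 * names known to tcrypt are currently provided by the crypto API.
	 */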
Linus Torvalds1da177e2005-04-16 15:20:36 -07003005 case 1000:
3006 test_available();
3007 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003008 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10003009
3010 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003011}
3012
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003013static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07003014{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003015 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08003016 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003017
Herbert Xuf139cfa2008-07-31 12:23:53 +08003018 for (i = 0; i < TVMEMSIZE; i++) {
3019 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3020 if (!tvmem[i])
3021 goto err_free_tv;
3022 }
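	/*
	 * Note (added comment): tvmem[] now holds TVMEMSIZE scratch pages
	 * used as test buffers; they are released at err_free_tv below
	 * whether or not the tests succeed.
	 */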
Linus Torvalds1da177e2005-04-16 15:20:36 -07003023
Kees Cook4e234ee2018-04-26 19:57:28 -07003024 err = do_test(alg, type, mask, mode, num_mb);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003025
Jarod Wilson4e033a62009-05-27 15:10:21 +10003026 if (err) {
3027 pr_err("one or more tests failed!\n");
3028 goto err_free_tv;
Rabin Vincent76512f22017-01-18 14:54:05 +01003029 } else {
3030 pr_debug("all tests passed\n");
Jarod Wilson4e033a62009-05-27 15:10:21 +10003031 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003032
Jarod Wilson4e033a62009-05-27 15:10:21 +10003033 /* We intentionally return -EAGAIN to prevent keeping the module,
3034 * unless we're running in fips mode. It does all its work from
3035 * init() and doesn't offer any runtime functionality, but in
3036 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10003037 * => we don't need it in memory, do we?
3038 * -- mludvig
3039 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10003040 if (!fips_enabled)
3041 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003042
Herbert Xuf139cfa2008-07-31 12:23:53 +08003043err_free_tv:
3044 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
3045 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003046
3047 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003048}
3049
3050/*
3051 * If an init function is provided, an exit function must also be provided
3052 * to allow module unload.
3053 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003054static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003055
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003056module_init(tcrypt_mod_init);
3057module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003058
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003059module_param(alg, charp, 0);
3060module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08003061module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003062module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07003063module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07003064MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3065 "(defaults to zero, which uses CPU cycles instead)");
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00003066module_param(num_mb, uint, 0000);
3067MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
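
/*
 * Illustrative invocations (added comment, assuming tcrypt is built as a
 * module).  Outside of fips mode the init function intentionally returns
 * -EAGAIN, so modprobe reporting a failure does not by itself mean a test
 * failed -- the actual results are written to the kernel log:
 *
 *   modprobe tcrypt mode=504 sec=1      # twofish skcipher speed tests
 *   modprobe tcrypt mode=601 num_mb=8   # multi-buffer des3_ede speed tests
 */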
Linus Torvalds1da177e2005-04-16 15:20:36 -07003068
3069MODULE_LICENSE("GPL");
3070MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3071MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");