/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
	NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

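/*
 * Build an AEAD scatterlist: entry 0 holds the associated data, the
 * remaining entries spread buflen bytes of payload across the xbuf pages.
 */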
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

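/* Complete one AEAD request, waiting if it was queued asynchronously. */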
static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

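/* Per-request state for the multibuffer AEAD speed test. */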
struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

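/*
 * Issue num_mb AEAD requests in parallel, then wait for all of them;
 * returns the last error seen, or 0 if every request succeeded.
 */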
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

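/* Run batches of num_mb requests for 'secs' seconds and report throughput. */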
static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

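/*
 * Four warm-up batches, then eight timed batches averaged down to
 * cycles per single operation.
 */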
static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

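/*
 * Multibuffer AEAD speed test: for each key size and block size in the
 * template, set up num_mb identical requests and time them as a batch.
 */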
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: aead: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

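/*
 * Single-request AEAD speed test: one request per (key size, block size)
 * combination, timed either in jiffies or in cycles.
 */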
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, *b_size);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

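/*
 * Multibuffer hash speed test: num_mb digest requests are issued in
 * parallel for every block size in the speed template.
 */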
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}

		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

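/*
 * Hash speed test over tvmem: the 'mask' argument selects between the
 * async (0) and sync-only (CRYPTO_ALG_ASYNC) wrappers below.
 */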
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

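/*
 * Multibuffer skcipher speed test: num_mb in-place requests per
 * (key size, block size) combination, timed as a batch.
 */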
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

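/*
 * Single-request skcipher speed test; 'async' selects whether an
 * asynchronous implementation may be used.
 */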
Herbert Xu7166e582016-06-29 18:03:50 +08001493static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
1494 struct cipher_speed_template *template,
1495 unsigned int tcount, u8 *keysize, bool async)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001496{
Nicolas Royerde1975332012-07-01 19:19:47 +02001497 unsigned int ret, i, j, k, iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001498 struct crypto_wait wait;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001499 const char *key;
1500 char iv[128];
Herbert Xu7166e582016-06-29 18:03:50 +08001501 struct skcipher_request *req;
1502 struct crypto_skcipher *tfm;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001503 const char *e;
1504 u32 *b_size;
1505
1506 if (enc == ENCRYPT)
1507 e = "encryption";
1508 else
1509 e = "decryption";
1510
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001511 crypto_init_wait(&wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001512
Herbert Xu7166e582016-06-29 18:03:50 +08001513 tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001514
1515 if (IS_ERR(tfm)) {
1516 pr_err("failed to load transform for %s: %ld\n", algo,
1517 PTR_ERR(tfm));
1518 return;
1519 }
1520
Luca Clementi263a8df2014-06-25 22:57:42 -07001521 pr_info("\ntesting speed of async %s (%s) %s\n", algo,
Herbert Xu7166e582016-06-29 18:03:50 +08001522 get_driver_name(crypto_skcipher, tfm), e);
Luca Clementi263a8df2014-06-25 22:57:42 -07001523
Herbert Xu7166e582016-06-29 18:03:50 +08001524 req = skcipher_request_alloc(tfm, GFP_KERNEL);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001525 if (!req) {
1526 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
1527 algo);
1528 goto out;
1529 }
1530
Herbert Xu7166e582016-06-29 18:03:50 +08001531 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001532 crypto_req_done, &wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001533
1534 i = 0;
1535 do {
1536 b_size = block_sizes;
1537
1538 do {
1539 struct scatterlist sg[TVMEMSIZE];
1540
1541 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
1542 pr_err("template (%u) too big for "
1543 "tvmem (%lu)\n", *keysize + *b_size,
1544 TVMEMSIZE * PAGE_SIZE);
1545 goto out_free_req;
1546 }
1547
1548 pr_info("test %u (%d bit key, %d byte blocks): ", i,
1549 *keysize * 8, *b_size);
1550
1551 memset(tvmem[0], 0xff, PAGE_SIZE);
1552
1553 /* set key, plain text and IV */
1554 key = tvmem[0];
1555 for (j = 0; j < tcount; j++) {
1556 if (template[j].klen == *keysize) {
1557 key = template[j].key;
1558 break;
1559 }
1560 }
1561
Herbert Xu7166e582016-06-29 18:03:50 +08001562 crypto_skcipher_clear_flags(tfm, ~0);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001563
Herbert Xu7166e582016-06-29 18:03:50 +08001564 ret = crypto_skcipher_setkey(tfm, key, *keysize);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001565 if (ret) {
1566 pr_err("setkey() failed flags=%x\n",
Herbert Xu7166e582016-06-29 18:03:50 +08001567 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001568 goto out_free_req;
1569 }
1570
Nicolas Royerde1975332012-07-01 19:19:47 +02001571 k = *keysize + *b_size;
Horia Geant?007ee8d2015-03-09 16:14:58 +02001572 sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
1573
Nicolas Royerde1975332012-07-01 19:19:47 +02001574 if (k > PAGE_SIZE) {
1575 sg_set_buf(sg, tvmem[0] + *keysize,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001576 PAGE_SIZE - *keysize);
Nicolas Royerde1975332012-07-01 19:19:47 +02001577 k -= PAGE_SIZE;
1578 j = 1;
1579 while (k > PAGE_SIZE) {
1580 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
1581 memset(tvmem[j], 0xff, PAGE_SIZE);
1582 j++;
1583 k -= PAGE_SIZE;
1584 }
1585 sg_set_buf(sg + j, tvmem[j], k);
1586 memset(tvmem[j], 0xff, k);
1587 } else {
1588 sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001589 }
1590
Herbert Xu7166e582016-06-29 18:03:50 +08001591 iv_len = crypto_skcipher_ivsize(tfm);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001592 if (iv_len)
1593 memset(&iv, 0xff, iv_len);
1594
Herbert Xu7166e582016-06-29 18:03:50 +08001595 skcipher_request_set_crypt(req, sg, sg, *b_size, iv);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001596
Horia Geantă2af63292018-07-23 17:18:48 +03001597 if (secs) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001598 ret = test_acipher_jiffies(req, enc,
Mark Rustad3e3dc252014-07-25 02:53:38 -07001599 *b_size, secs);
Horia Geantă2af63292018-07-23 17:18:48 +03001600 cond_resched();
1601 } else {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001602 ret = test_acipher_cycles(req, enc,
1603 *b_size);
Horia Geantă2af63292018-07-23 17:18:48 +03001604 }
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001605
1606 if (ret) {
1607 pr_err("%s() failed flags=%x\n", e,
Herbert Xu7166e582016-06-29 18:03:50 +08001608 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001609 break;
1610 }
1611 b_size++;
1612 i++;
1613 } while (*b_size);
1614 keysize++;
1615 } while (*keysize);
1616
1617out_free_req:
Herbert Xu7166e582016-06-29 18:03:50 +08001618 skcipher_request_free(req);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001619out:
Herbert Xu7166e582016-06-29 18:03:50 +08001620 crypto_free_skcipher(tfm);
1621}
1622
1623static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
1624 struct cipher_speed_template *template,
1625 unsigned int tcount, u8 *keysize)
1626{
1627 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1628 true);
1629}
1630
1631static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
1632 struct cipher_speed_template *template,
1633 unsigned int tcount, u8 *keysize)
1634{
1635 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1636 false);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001637}
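/*
 * Both wrappers above funnel into test_skcipher_speed(); the only
 * difference is the trailing bool -- true for the acipher variant --
 * which is consulted earlier in test_skcipher_speed(), when the
 * transform is allocated, to decide whether an asynchronous skcipher
 * implementation may be selected or a synchronous one is required.
 */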
1638
Herbert Xuef2736f2005-06-22 13:26:03 -07001639static void test_available(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07001640{
1641 char **name = check;
Herbert Xuef2736f2005-06-22 13:26:03 -07001642
Linus Torvalds1da177e2005-04-16 15:20:36 -07001643 while (*name) {
1644 printk("alg %s ", *name);
Herbert Xu6158efc2007-04-04 17:41:07 +10001645 printk(crypto_has_alg(*name, 0, 0) ?
Herbert Xue4d5b792006-08-26 18:12:40 +10001646 "found\n" : "not found\n");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001647 name++;
Herbert Xuef2736f2005-06-22 13:26:03 -07001648 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07001649}
1650
Herbert Xu01b32322008-07-31 15:41:55 +08001651static inline int tcrypt_test(const char *alg)
1652{
Jarod Wilson4e033a62009-05-27 15:10:21 +10001653 int ret;
1654
Rabin Vincent76512f22017-01-18 14:54:05 +01001655 pr_debug("testing %s\n", alg);
1656
Jarod Wilson4e033a62009-05-27 15:10:21 +10001657 ret = alg_test(alg, alg, 0, 0);
1658 /* non-fips algs return -EINVAL in fips mode */
1659 if (fips_enabled && ret == -EINVAL)
1660 ret = 0;
1661 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +08001662}
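/*
 * tcrypt_test() drives the correctness self-tests in testmgr via
 * alg_test().  In FIPS mode, non-approved algorithms are rejected with
 * -EINVAL before any vectors run, and that is deliberately not counted
 * as a failure here.
 */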
1663
Kees Cook4e234ee2018-04-26 19:57:28 -07001664static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001665{
1666 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001667 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001668
1669 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001670 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001671 if (alg) {
1672 if (!crypto_has_alg(alg, type,
1673 mask ?: CRYPTO_ALG_TYPE_MASK))
1674 ret = -ENOENT;
1675 break;
1676 }
1677
Herbert Xu01b32322008-07-31 15:41:55 +08001678 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001679 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001680 break;
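	/*
	 * Mode 0 (the default) simply re-invokes do_test() for every
	 * self-test mode from 1 to 199.  When alg= is also given, case 0
	 * degenerates into a plain availability probe via crypto_has_alg().
	 * A typical invocation is "modprobe tcrypt mode=10" to run just the
	 * AES self-tests.
	 */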
1681
1682 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001683 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001684 break;
1685
1686 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001687 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001688 break;
1689
1690 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001691 ret += tcrypt_test("ecb(des)");
1692 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001693 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001694 break;
1695
1696 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001697 ret += tcrypt_test("ecb(des3_ede)");
1698 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001699 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001700 break;
1701
1702 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001703 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001704 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001705
Linus Torvalds1da177e2005-04-16 15:20:36 -07001706 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001707 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001708 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001709
Linus Torvalds1da177e2005-04-16 15:20:36 -07001710 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001711 ret += tcrypt_test("ecb(blowfish)");
1712 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001713 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001714 break;
1715
1716 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001717 ret += tcrypt_test("ecb(twofish)");
1718 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001719 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001720 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001721 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001722 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001723
Linus Torvalds1da177e2005-04-16 15:20:36 -07001724 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001725 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001726 ret += tcrypt_test("cbc(serpent)");
1727 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001728 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001729 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001730 break;
1731
1732 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001733 ret += tcrypt_test("ecb(aes)");
1734 ret += tcrypt_test("cbc(aes)");
1735 ret += tcrypt_test("lrw(aes)");
1736 ret += tcrypt_test("xts(aes)");
1737 ret += tcrypt_test("ctr(aes)");
1738 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001739 break;
1740
1741 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001742 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001743 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001744
Linus Torvalds1da177e2005-04-16 15:20:36 -07001745 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001746 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001747 break;
1748
1749 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001750 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001751 break;
1752
1753 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001754 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001755 ret += tcrypt_test("cbc(cast5)");
1756 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001757 break;
1758
1759 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001760 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001761 ret += tcrypt_test("cbc(cast6)");
1762 ret += tcrypt_test("ctr(cast6)");
1763 ret += tcrypt_test("lrw(cast6)");
1764 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001765 break;
1766
1767 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001768 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001769 break;
1770
1771 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001772 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001773 break;
1774
1775 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001776 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001777 break;
1778
1779 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001780 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001781 break;
1782
1783 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001784 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001785 break;
1786
1787 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001788 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001789 break;
1790
1791 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001792 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001793 break;
1794
1795 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001796 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001797 break;
1798
1799 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001800 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001801 break;
1802
1803 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001804 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001805 break;
1806
1807 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001808 ret += tcrypt_test("ecb(anubis)");
1809 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001810 break;
1811
1812 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001813 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001814 break;
1815
1816 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001817 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001818 break;
1819
1820 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001821 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001822 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001823
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001824 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001825 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001826 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001827
David Howells90831632006-12-16 12:13:14 +11001828 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001829 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001830 break;
1831
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001832 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001833 ret += tcrypt_test("ecb(camellia)");
1834 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001835 ret += tcrypt_test("ctr(camellia)");
1836 ret += tcrypt_test("lrw(camellia)");
1837 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001838 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001839
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001840 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001841 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001842 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001843
Tan Swee Heng2407d602007-11-23 19:45:00 +08001844 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001845 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001846 break;
1847
Herbert Xu8df213d2007-12-02 14:55:47 +11001848 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001849 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001850 break;
1851
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001852 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001853 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001854 break;
1855
Joy Latten93cc74e2007-12-12 20:24:22 +08001856 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001857 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001858 break;
1859
Kevin Coffman76cb9522008-03-24 21:26:16 +08001860 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001861 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001862 break;
1863
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001864 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001865 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001866 break;
1867
1868 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001869 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001870 break;
1871
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001872 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001873 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001874 break;
1875
1876 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001877 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001878 break;
1879
1880 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001881 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001882 break;
1883
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001884 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001885 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001886 break;
1887
Jarod Wilson5d667322009-05-04 19:23:40 +08001888 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001889 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001890 break;
1891
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001892 case 46:
1893 ret += tcrypt_test("ghash");
1894 break;
1895
Herbert Xu684115212013-09-07 12:56:26 +10001896 case 47:
1897 ret += tcrypt_test("crct10dif");
1898 break;
1899
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301900 case 48:
1901 ret += tcrypt_test("sha3-224");
1902 break;
1903
1904 case 49:
1905 ret += tcrypt_test("sha3-256");
1906 break;
1907
1908 case 50:
1909 ret += tcrypt_test("sha3-384");
1910 break;
1911
1912 case 51:
1913 ret += tcrypt_test("sha3-512");
1914 break;
1915
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001916 case 52:
1917 ret += tcrypt_test("sm3");
1918 break;
1919
Linus Torvalds1da177e2005-04-16 15:20:36 -07001920 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001921 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001922 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001923
Linus Torvalds1da177e2005-04-16 15:20:36 -07001924 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001925 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001926 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001927
Linus Torvalds1da177e2005-04-16 15:20:36 -07001928 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001929 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001930 break;
1931
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001932 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001933 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001934 break;
1935
1936 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001937 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001938 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001939
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001940 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001941 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001942 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001943
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001944 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001945 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001946 break;
1947
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001948 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001949 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001950 break;
1951
1952 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001953 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001954 break;
1955
Shane Wangf1939f72009-09-02 20:05:22 +10001956 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001957 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001958 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001959
raveendra padasalagi98eca722016-07-01 11:16:54 +05301960 case 111:
1961 ret += tcrypt_test("hmac(sha3-224)");
1962 break;
1963
1964 case 112:
1965 ret += tcrypt_test("hmac(sha3-256)");
1966 break;
1967
1968 case 113:
1969 ret += tcrypt_test("hmac(sha3-384)");
1970 break;
1971
1972 case 114:
1973 ret += tcrypt_test("hmac(sha3-512)");
1974 break;
1975
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001976 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001977 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001978 break;
1979
Adrian Hoban69435b92010-11-04 15:02:04 -04001980 case 151:
1981 ret += tcrypt_test("rfc4106(gcm(aes))");
1982 break;
1983
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001984 case 152:
1985 ret += tcrypt_test("rfc4543(gcm(aes))");
1986 break;
1987
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001988 case 153:
1989 ret += tcrypt_test("cmac(aes)");
1990 break;
1991
1992 case 154:
1993 ret += tcrypt_test("cmac(des3_ede)");
1994 break;
1995
Horia Geantabbf9c892013-11-28 15:11:16 +02001996 case 155:
1997 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1998 break;
1999
Horia Geantabca4feb2014-03-14 17:46:51 +02002000 case 156:
2001 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
2002 break;
2003
2004 case 157:
2005 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2006 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05302007 case 181:
2008 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2009 break;
2010 case 182:
2011 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2012 break;
2013 case 183:
2014 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2015 break;
2016 case 184:
2017 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2018 break;
2019 case 185:
2020 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2021 break;
2022 case 186:
2023 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2024 break;
2025 case 187:
2026 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2027 break;
2028 case 188:
2029 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2030 break;
2031 case 189:
2032 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2033 break;
2034 case 190:
2035 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2036 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002037 case 191:
2038 ret += tcrypt_test("ecb(sm4)");
2039 break;
Harald Welteebfd9bc2005-06-22 13:27:23 -07002040 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002041 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002042 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002043 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002044 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002045 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002046 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002047 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002048 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002049 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002050 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002051 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002052 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002053 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002054 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002055 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002056 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002057 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2058 speed_template_16_24_32);
2059 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2060 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002061 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2062 speed_template_16_24_32);
2063 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2064 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002065 break;
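	/*
	 * The speed_template_* suffixes list the key sizes, in bytes, that
	 * each benchmark walks through: speed_template_16_24_32 covers
	 * AES-128/192/256, while the XTS runs use speed_template_32_64
	 * because XTS consumes two concatenated keys.  A non-zero sec=
	 * selects wall-clock measurement, e.g. "modprobe tcrypt mode=200
	 * sec=1"; with sec left at 0, cycles are counted instead.
	 */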
2066
2067 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002068 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002069 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002070 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002071 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002072 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002073 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002074 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002075 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002076 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002077 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002078 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002079 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002080 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2081 des3_speed_template, DES3_SPEED_VECTORS,
2082 speed_template_24);
2083 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2084 des3_speed_template, DES3_SPEED_VECTORS,
2085 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002086 break;
2087
2088 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002089 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002090 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002091 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002092 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002093 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002094 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002095 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002096 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002097 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2098 speed_template_16_24_32);
2099 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2100 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002101 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2102 speed_template_32_40_48);
2103 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2104 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002105 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2106 speed_template_32_48_64);
2107 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2108 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002109 break;
2110
2111 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002112 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002113 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002114 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002115 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002116 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002117 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002118 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002119 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002120 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2121 speed_template_8_32);
2122 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2123 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002124 break;
2125
2126 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002127 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002128 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002129 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002130 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002131 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002132 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002133 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002134 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002135 break;
2136
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002137 case 205:
2138 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002139 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002140 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002141 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002142 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002143 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002144 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002145 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002146 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2147 speed_template_16_24_32);
2148 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2149 speed_template_16_24_32);
2150 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2151 speed_template_32_40_48);
2152 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2153 speed_template_32_40_48);
2154 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2155 speed_template_32_48_64);
2156 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2157 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002158 break;
2159
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002160 case 206:
2161 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002162 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002163 break;
2164
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002165 case 207:
2166 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2167 speed_template_16_32);
2168 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2169 speed_template_16_32);
2170 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2171 speed_template_16_32);
2172 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2173 speed_template_16_32);
2174 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2175 speed_template_16_32);
2176 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2177 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002178 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2179 speed_template_32_48);
2180 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2181 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002182 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2183 speed_template_32_64);
2184 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2185 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002186 break;
2187
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002188 case 208:
2189 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2190 speed_template_8);
2191 break;
2192
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002193 case 209:
2194 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2195 speed_template_8_16);
2196 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2197 speed_template_8_16);
2198 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2199 speed_template_8_16);
2200 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2201 speed_template_8_16);
2202 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2203 speed_template_8_16);
2204 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2205 speed_template_8_16);
2206 break;
2207
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002208 case 210:
2209 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2210 speed_template_16_32);
2211 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2212 speed_template_16_32);
2213 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2214 speed_template_16_32);
2215 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2216 speed_template_16_32);
2217 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2218 speed_template_16_32);
2219 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2220 speed_template_16_32);
2221 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2222 speed_template_32_48);
2223 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2224 speed_template_32_48);
2225 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2226 speed_template_32_64);
2227 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2228 speed_template_32_64);
2229 break;
2230
Tim Chen53f52d72013-12-11 14:28:47 -08002231 case 211:
2232 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002233 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302234 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002235 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002236 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2237 NULL, 0, 16, 16, aead_speed_template_20);
2238 test_aead_speed("gcm(aes)", DECRYPT, sec,
2239 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002240 break;
2241
Herbert Xu4e4aab62015-06-17 14:04:21 +08002242 case 212:
2243 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002244 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002245 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2246 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002247 break;
2248
Martin Willi2dce0632015-07-16 19:13:59 +02002249 case 213:
2250 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2251 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002252 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2253 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002254 break;
2255
2256 case 214:
2257 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2258 speed_template_32);
2259 break;
2260
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002261 case 215:
2262 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2263 0, 16, 16, aead_speed_template_20, num_mb);
2264 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2265 speed_template_16_24_32, num_mb);
2266 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2267 0, 16, 16, aead_speed_template_20, num_mb);
2268 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2269 speed_template_16_24_32, num_mb);
2270 break;
2271
2272 case 216:
2273 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2274 16, 16, aead_speed_template_19, num_mb);
2275 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2276 16, 16, aead_speed_template_19, num_mb);
2277 break;
2278
2279 case 217:
2280 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2281 sec, NULL, 0, 16, 8, aead_speed_template_36,
2282 num_mb);
2283 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2284 sec, NULL, 0, 16, 8, aead_speed_template_36,
2285 num_mb);
2286 break;
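	/*
	 * Modes 215-217 are the multi-buffer counterparts of the AEAD speed
	 * tests above: num_mb requests (a module parameter, 8 unless
	 * overridden) are prepared up front and issued as one batch so that
	 * asynchronous or parallel implementations can be kept busy.
	 */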
2287
Eric Biggersf467c4a2018-11-16 17:26:31 -08002288 case 219:
2289 test_cipher_speed("adiantum(xchacha12,aes)", ENCRYPT, sec, NULL,
2290 0, speed_template_32);
2291 test_cipher_speed("adiantum(xchacha12,aes)", DECRYPT, sec, NULL,
2292 0, speed_template_32);
2293 test_cipher_speed("adiantum(xchacha20,aes)", ENCRYPT, sec, NULL,
2294 0, speed_template_32);
2295 test_cipher_speed("adiantum(xchacha20,aes)", DECRYPT, sec, NULL,
2296 0, speed_template_32);
2297 break;
2298
Michal Ludvige8057922006-05-30 22:04:19 +10002299 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002300 if (alg) {
2301 test_hash_speed(alg, sec, generic_hash_speed_template);
2302 break;
2303 }
Michal Ludvige8057922006-05-30 22:04:19 +10002304 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002305 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002306 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002307 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002308 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002309 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002310 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002311 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002312 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002313 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002314 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002315 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002316 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002317 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002318 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002319 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002320 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002321 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002322 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002323 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002324 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002325 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002326 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002327 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002328 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002329 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002330 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002331 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002332 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002333 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002334 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002335 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002336 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002337 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002338 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002339 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002340 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002341 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002342 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002343 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002344 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002345 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002346 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002347 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002348 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002349 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002350 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002351 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002352 /* fall through */
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08002353 case 313:
2354 test_hash_speed("sha224", sec, generic_hash_speed_template);
2355 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002356 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002357 case 314:
2358 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2359 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002360 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002361 case 315:
2362 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2363 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002364 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002365 case 316:
2366 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2367 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002368 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002369 case 317:
2370 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2371 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002372 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08002373 case 318:
2374 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
2375 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002376 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07002377 case 319:
2378 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2379 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002380 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10002381 case 320:
2382 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2383 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002384 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02002385 case 321:
2386 test_hash_speed("poly1305", sec, poly1305_speed_template);
2387 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002388 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302389 case 322:
2390 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2391 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002392 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302393 case 323:
2394 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2395 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002396 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302397 case 324:
2398 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2399 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002400 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302401 case 325:
2402 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2403 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002404 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002405 case 326:
2406 test_hash_speed("sm3", sec, generic_hash_speed_template);
2407 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002408 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002409 case 399:
2410 break;
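	/*
	 * The 3xx block is one long fall-through chain: each case runs its
	 * digest and breaks only if a specific mode in the 301-399 range
	 * was requested, so mode=303 benchmarks sha1 alone while mode=300
	 * (optionally with alg= for a single generic hash) walks every
	 * entry down to the terminating case 399.
	 */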
2411
David S. Millerbeb63da2010-05-19 14:11:21 +10002412 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002413 if (alg) {
2414 test_ahash_speed(alg, sec, generic_hash_speed_template);
2415 break;
2416 }
David S. Millerbeb63da2010-05-19 14:11:21 +10002417 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002418 case 401:
2419 test_ahash_speed("md4", sec, generic_hash_speed_template);
2420 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002421 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002422 case 402:
2423 test_ahash_speed("md5", sec, generic_hash_speed_template);
2424 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002425 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002426 case 403:
2427 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2428 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002429 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002430 case 404:
2431 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2432 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002433 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002434 case 405:
2435 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2436 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002437 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002438 case 406:
2439 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2440 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002441 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002442 case 407:
2443 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2444 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002445 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002446 case 408:
2447 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2448 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002449 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002450 case 409:
2451 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2452 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002453 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002454 case 410:
2455 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2456 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002457 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002458 case 411:
2459 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2460 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002461 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002462 case 412:
2463 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2464 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002465 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002466 case 413:
2467 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2468 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002469 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002470 case 414:
2471 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2472 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002473 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002474 case 415:
2475 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2476 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002477 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002478 case 416:
2479 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2480 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002481 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002482 case 417:
2483 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2484 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002485 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302486 case 418:
2487 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2488 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002489 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302490 case 419:
2491 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2492 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002493 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302494 case 420:
2495 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2496 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002497 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302498 case 421:
2499 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2500 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002501 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002502 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002503 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2504 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002505 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002506 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002507 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002508 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2509 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002510 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002511 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07002512 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002513 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2514 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002515 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002516 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002517 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002518 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2519 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002520 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002521 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002522 case 499:
2523 break;
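	/*
	 * The 4xx chain mirrors the 3xx one but goes through the ahash API
	 * (test_ahash_speed() and, for 422-425, test_mb_ahash_speed()),
	 * i.e. the asynchronous hash interface that hardware offload
	 * drivers typically implement.
	 */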
2524
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002525 case 500:
2526 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2527 speed_template_16_24_32);
2528 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2529 speed_template_16_24_32);
2530 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2531 speed_template_16_24_32);
2532 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2533 speed_template_16_24_32);
2534 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2535 speed_template_32_40_48);
2536 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2537 speed_template_32_40_48);
2538 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002539 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002540 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002541 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002542 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2543 speed_template_16_24_32);
2544 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2545 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002546 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2547 speed_template_16_24_32);
2548 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2549 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002550 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2551 speed_template_16_24_32);
2552 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2553 speed_template_16_24_32);
2554 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2555 speed_template_16_24_32);
2556 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2557 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002558 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2559 speed_template_20_28_36);
2560 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2561 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002562 break;
2563
2564 case 501:
2565 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2566 des3_speed_template, DES3_SPEED_VECTORS,
2567 speed_template_24);
2568 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2569 des3_speed_template, DES3_SPEED_VECTORS,
2570 speed_template_24);
2571 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2572 des3_speed_template, DES3_SPEED_VECTORS,
2573 speed_template_24);
2574 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2575 des3_speed_template, DES3_SPEED_VECTORS,
2576 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002577 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2578 des3_speed_template, DES3_SPEED_VECTORS,
2579 speed_template_24);
2580 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2581 des3_speed_template, DES3_SPEED_VECTORS,
2582 speed_template_24);
2583 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2584 des3_speed_template, DES3_SPEED_VECTORS,
2585 speed_template_24);
2586 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2587 des3_speed_template, DES3_SPEED_VECTORS,
2588 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002589 break;
2590
2591 case 502:
2592 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2593 speed_template_8);
2594 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2595 speed_template_8);
2596 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2597 speed_template_8);
2598 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2599 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002600 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2601 speed_template_8);
2602 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2603 speed_template_8);
2604 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2605 speed_template_8);
2606 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2607 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002608 break;
2609
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002610 case 503:
2611 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2612 speed_template_16_32);
2613 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2614 speed_template_16_32);
2615 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2616 speed_template_16_32);
2617 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2618 speed_template_16_32);
2619 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2620 speed_template_16_32);
2621 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2622 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002623 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2624 speed_template_32_48);
2625 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2626 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002627 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2628 speed_template_32_64);
2629 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2630 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002631 break;
2632
Johannes Goetzfried107778b2012-05-28 15:54:24 +02002633 case 504:
2634 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2635 speed_template_16_24_32);
2636 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2637 speed_template_16_24_32);
2638 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2639 speed_template_16_24_32);
2640 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2641 speed_template_16_24_32);
2642 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2643 speed_template_16_24_32);
2644 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2645 speed_template_16_24_32);
2646 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2647 speed_template_32_40_48);
2648 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2649 speed_template_32_40_48);
2650 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2651 speed_template_32_48_64);
2652 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2653 speed_template_32_48_64);
2654 break;
2655
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002656 case 505:
2657 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2658 speed_template_8);
2659 break;
2660
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002661 case 506:
2662 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2663 speed_template_8_16);
2664 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2665 speed_template_8_16);
2666 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2667 speed_template_8_16);
2668 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2669 speed_template_8_16);
2670 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2671 speed_template_8_16);
2672 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2673 speed_template_8_16);
2674 break;
2675
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002676 case 507:
2677 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2678 speed_template_16_32);
2679 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2680 speed_template_16_32);
2681 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2682 speed_template_16_32);
2683 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2684 speed_template_16_32);
2685 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2686 speed_template_16_32);
2687 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2688 speed_template_16_32);
2689 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2690 speed_template_32_48);
2691 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2692 speed_template_32_48);
2693 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2694 speed_template_32_64);
2695 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2696 speed_template_32_64);
2697 break;
2698
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002699 case 508:
2700 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2701 speed_template_16_32);
2702 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2703 speed_template_16_32);
2704 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2705 speed_template_16_32);
2706 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2707 speed_template_16_32);
2708 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2709 speed_template_16_32);
2710 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2711 speed_template_16_32);
2712 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2713 speed_template_32_48);
2714 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2715 speed_template_32_48);
2716 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2717 speed_template_32_64);
2718 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2719 speed_template_32_64);
2720 break;
2721
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002722 case 509:
2723 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2724 speed_template_8_32);
2725 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2726 speed_template_8_32);
2727 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2728 speed_template_8_32);
2729 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2730 speed_template_8_32);
2731 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2732 speed_template_8_32);
2733 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2734 speed_template_8_32);
2735 break;
2736
	case 600:
		test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
				       0, speed_template_20_28_36, num_mb);
		test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
				       0, speed_template_20_28_36, num_mb);
		break;

	case 601:
		test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		break;

	case 602:
		test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		break;

	case 603:
		test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 604:
		test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48_64, num_mb);
		test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48_64, num_mb);
		break;

	case 605:
		test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		break;

	case 606:
		test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		break;

	case 607:
		test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 608:
		test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 609:
		test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		break;

	case 1000:
		test_available();
		break;
	}

	return ret;
}

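/*
 * Module init: allocate the tvmem scratch pages, run the tests selected
 * by the module parameters, then free the pages again.  See the comment
 * below for why -EAGAIN is returned on success outside of FIPS mode.
 */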
static int __init tcrypt_mod_init(void)
{
	int err = -ENOMEM;
	int i;

	for (i = 0; i < TVMEMSIZE; i++) {
		tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!tvmem[i])
			goto err_free_tv;
	}

	err = do_test(alg, type, mask, mode, num_mb);

	if (err) {
		printk(KERN_ERR "tcrypt: one or more tests failed!\n");
		goto err_free_tv;
	} else {
		pr_debug("all tests passed\n");
	}

	/* We intentionally return -EAGAIN so that the module is not kept
	 * loaded, unless we're running in FIPS mode.  The module does all
	 * of its work from init() and offers no runtime functionality, so
	 * there is no need to keep it in memory; in the FIPS case, however,
	 * checking for a successful load is helpful.
	 * -- mludvig
	 */
	if (!fips_enabled)
		err = -EAGAIN;

err_free_tv:
	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
		free_page((unsigned long)tvmem[i]);

	return err;
}

/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);

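/*
 * Illustrative invocation (parameter names as declared below):
 *
 *	modprobe tcrypt mode=600 sec=1 num_mb=8
 *
 * runs the multi-buffer AES speed tests (case 600) for one second each
 * with eight concurrent requests.
 */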
module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
		      "(defaults to zero which uses CPU cycles instead)");
module_param(num_mb, uint, 0000);
MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");