/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

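/*
 * The knobs below (alg, type, mask, mode, sec, num_mb) are wired up as
 * module parameters further down in this file, so they are what a
 * "modprobe tcrypt mode=... sec=..." invocation ends up setting.
 */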
static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
	NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

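/*
 * Helpers for the page-sized test buffers (xbuf/axbuf/xoutbuf): allocate
 * XBUFSIZE single pages, or free them again.  On allocation failure the
 * pages obtained so far are released and -ENOMEM is returned.
 */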
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

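/*
 * Build an AEAD scatterlist: entry 0 carries the associated data, the
 * remaining entries map the payload one page at a time.  A buflen larger
 * than XBUFSIZE pages is silently capped at XBUFSIZE full pages.
 */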
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

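/*
 * Resolve a possibly asynchronous AEAD request: crypto_wait_req() turns
 * -EINPROGRESS/-EBUSY into a sleep on the crypto_wait stashed in
 * req->base.data and returns the final completion status.
 */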
static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

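/*
 * Submit num_mb requests back to back and only then wait for all of them,
 * giving batching implementations a chance to coalesce the work.  If any
 * request fails, one of the error codes is returned after every request
 * has been waited for.
 */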
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

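/*
 * Multibuffer AEAD speed test: set up num_mb identical requests (same key
 * and IV, separate buffers) and measure them for every (keysize,
 * aead_sizes[]) combination, either over 'secs' seconds or, when secs is
 * 0, in CPU cycles.
 */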
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: aead: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs)
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
			else
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

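/*
 * Single-request AEAD timing: repeat one encrypt/decrypt either for 'secs'
 * seconds (jiffies based) or over a fixed number of cycle-counted runs.
 */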
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

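/*
 * Single-request AEAD speed test.  For decryption a valid authentication
 * tag is generated first by running the encryption path once with the
 * buffers swapped, so the timed decrypt operates on authentic input.
 */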
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
					i, *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

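/* Map the TVMEMSIZE tvmem pages into a scatterlist and fill them with 0xff. */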
static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

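/*
 * Multibuffer hash speed test.  Only digest-style entries of the
 * hash_speed table (blen == plen) are exercised; each of the num_mb
 * requests digests its own XBUFSIZE-page scatterlist.
 */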
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs)
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
		else
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);

		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

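/*
 * Single-request ahash timing helpers.  The *_digest variants time one-shot
 * digests; the generic variants time init/update/final sequences using
 * plen-sized updates.
 */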
static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

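/*
 * Common ahash speed test.  The 'mask' argument selects the implementation
 * type: test_ahash_speed() passes 0 (any implementation), test_hash_speed()
 * passes CRYPTO_ALG_ASYNC so that only synchronous transforms match.
 */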
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

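/*
 * Multibuffer skcipher testing follows the same pattern as the AEAD and
 * hash variants above: fire num_mb requests, then wait for all of them
 * before reporting a result.
 */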
struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

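/*
 * Multibuffer skcipher speed test over every (keysize, block_sizes[])
 * combination.  The operation is done in place: source and destination use
 * the same scatterlist.
 */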
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
			algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs)
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
			else
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

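/* Resolve a possibly asynchronous skcipher request, as do_one_aead_op() above. */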
static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

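/*
 * Single-request skcipher speed test.  The 'async' flag only affects
 * transform selection: test_acipher_speed() accepts asynchronous
 * implementations, test_cipher_speed() restricts allocation to synchronous
 * ones by putting CRYPTO_ALG_ASYNC in the mask.
 */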
static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
				struct cipher_speed_template *template,
				unsigned int tcount, u8 *keysize, bool async)
{
	unsigned int ret, i, j, k, iv_len;
	struct crypto_wait wait;
	const char *key;
	char iv[128];
	struct skcipher_request *req;
	struct crypto_skcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			skcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	skcipher_request_free(req);
out:
	crypto_free_skcipher(tfm);
}

static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   true);
}

static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   false);
}

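/* Report which of the algorithms in check[] are currently available. */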
static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

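/*
 * Run the testmgr self-test for one algorithm.  In FIPS mode, non-approved
 * algorithms return -EINVAL from alg_test(); that is treated as "skipped"
 * rather than a failure.
 */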
static inline int tcrypt_test(const char *alg)
{
	int ret;

	pr_debug("testing %s\n", alg);

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

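/*
 * Dispatch one test mode.  Mode 0 either checks availability of a single
 * named algorithm or, when no algorithm is given, runs every numbered mode
 * from 1 to 199 in turn.
 */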
Kees Cook4e234ee2018-04-26 19:57:28 -07001649static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001650{
1651 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001652 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001653
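	/*
	 * Dispatch on the requested mode.  Roughly: 0 runs every self-test
	 * below in turn; 1-199 are individual correctness tests driven by
	 * tcrypt_test(); 200-299 cipher and AEAD speed tests; 300-399 hash
	 * speed tests; 400-499 ahash and multi-buffer hash speed tests;
	 * 500-599 async skcipher speed tests; 600 and up multi-buffer
	 * skcipher speed tests.
	 */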
1654 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001655 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001656 if (alg) {
1657 if (!crypto_has_alg(alg, type,
1658 mask ?: CRYPTO_ALG_TYPE_MASK))
1659 ret = -ENOENT;
1660 break;
1661 }
1662
Herbert Xu01b32322008-07-31 15:41:55 +08001663 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001664 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001665 break;
1666
1667 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001668 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001669 break;
1670
1671 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001672 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001673 break;
1674
1675 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001676 ret += tcrypt_test("ecb(des)");
1677 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001678 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001679 break;
1680
1681 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001682 ret += tcrypt_test("ecb(des3_ede)");
1683 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001684 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001685 break;
1686
1687 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001688 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001689 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001690
Linus Torvalds1da177e2005-04-16 15:20:36 -07001691 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001692 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001693 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001694
Linus Torvalds1da177e2005-04-16 15:20:36 -07001695 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001696 ret += tcrypt_test("ecb(blowfish)");
1697 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001698 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001699 break;
1700
1701 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001702 ret += tcrypt_test("ecb(twofish)");
1703 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001704 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001705 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001706 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001707 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001708
Linus Torvalds1da177e2005-04-16 15:20:36 -07001709 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001710 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001711 ret += tcrypt_test("cbc(serpent)");
1712 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001713 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001714 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001715 break;
1716
1717 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001718 ret += tcrypt_test("ecb(aes)");
1719 ret += tcrypt_test("cbc(aes)");
1720 ret += tcrypt_test("lrw(aes)");
1721 ret += tcrypt_test("xts(aes)");
1722 ret += tcrypt_test("ctr(aes)");
1723 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001724 break;
1725
1726 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001727 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001728 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001729
Linus Torvalds1da177e2005-04-16 15:20:36 -07001730 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001731 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001732 break;
1733
1734 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001735 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001736 break;
1737
1738 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001739 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001740 ret += tcrypt_test("cbc(cast5)");
1741 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001742 break;
1743
1744 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001745 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001746 ret += tcrypt_test("cbc(cast6)");
1747 ret += tcrypt_test("ctr(cast6)");
1748 ret += tcrypt_test("lrw(cast6)");
1749 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001750 break;
1751
1752 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001753 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001754 break;
1755
1756 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001757 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001758 break;
1759
1760 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001761 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001762 break;
1763
1764 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001765 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001766 break;
1767
1768 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001769 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001770 break;
1771
1772 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001773 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001774 break;
1775
1776 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001777 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001778 break;
1779
1780 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001781 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001782 break;
1783
1784 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001785 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001786 break;
1787
1788 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001789 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001790 break;
1791
1792 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001793 ret += tcrypt_test("ecb(anubis)");
1794 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001795 break;
1796
1797 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001798 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001799 break;
1800
1801 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001802 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001803 break;
1804
1805 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001806 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001807 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001808
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001809 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001810 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001811 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001812
David Howells90831632006-12-16 12:13:14 +11001813 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001814 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001815 break;
1816
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001817 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001818 ret += tcrypt_test("ecb(camellia)");
1819 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001820 ret += tcrypt_test("ctr(camellia)");
1821 ret += tcrypt_test("lrw(camellia)");
1822 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001823 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001824
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001825 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001826 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001827 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001828
Tan Swee Heng2407d602007-11-23 19:45:00 +08001829 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001830 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001831 break;
1832
Herbert Xu8df213d2007-12-02 14:55:47 +11001833 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001834 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001835 break;
1836
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001837 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001838 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001839 break;
1840
Joy Latten93cc74e2007-12-12 20:24:22 +08001841 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001842 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001843 break;
1844
Kevin Coffman76cb9522008-03-24 21:26:16 +08001845 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001846 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001847 break;
1848
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001849 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001850 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001851 break;
1852
1853 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001854 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001855 break;
1856
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001857 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001858 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001859 break;
1860
1861 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001862 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001863 break;
1864
1865 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001866 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001867 break;
1868
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001869 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001870 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001871 break;
1872
Jarod Wilson5d667322009-05-04 19:23:40 +08001873 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001874 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001875 break;
1876
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001877 case 46:
1878 ret += tcrypt_test("ghash");
1879 break;
1880
Herbert Xu684115212013-09-07 12:56:26 +10001881 case 47:
1882 ret += tcrypt_test("crct10dif");
1883 break;
1884
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301885 case 48:
1886 ret += tcrypt_test("sha3-224");
1887 break;
1888
1889 case 49:
1890 ret += tcrypt_test("sha3-256");
1891 break;
1892
1893 case 50:
1894 ret += tcrypt_test("sha3-384");
1895 break;
1896
1897 case 51:
1898 ret += tcrypt_test("sha3-512");
1899 break;
1900
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001901 case 52:
1902 ret += tcrypt_test("sm3");
1903 break;
1904
Linus Torvalds1da177e2005-04-16 15:20:36 -07001905 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001906 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001907 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001908
Linus Torvalds1da177e2005-04-16 15:20:36 -07001909 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001910 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001911 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001912
Linus Torvalds1da177e2005-04-16 15:20:36 -07001913 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001914 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001915 break;
1916
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001917 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001918 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001919 break;
1920
1921 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001922 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001923 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001924
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001925 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001926 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001927 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001928
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001929 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001930 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001931 break;
1932
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001933 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001934 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001935 break;
1936
1937 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001938 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001939 break;
1940
Shane Wangf1939f72009-09-02 20:05:22 +10001941 case 109:
1942 ret += tcrypt_test("vmac(aes)");
1943 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001944
raveendra padasalagi98eca722016-07-01 11:16:54 +05301945 case 111:
1946 ret += tcrypt_test("hmac(sha3-224)");
1947 break;
1948
1949 case 112:
1950 ret += tcrypt_test("hmac(sha3-256)");
1951 break;
1952
1953 case 113:
1954 ret += tcrypt_test("hmac(sha3-384)");
1955 break;
1956
1957 case 114:
1958 ret += tcrypt_test("hmac(sha3-512)");
1959 break;
1960
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001961 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001962 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001963 break;
1964
Adrian Hoban69435b92010-11-04 15:02:04 -04001965 case 151:
1966 ret += tcrypt_test("rfc4106(gcm(aes))");
1967 break;
1968
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001969 case 152:
1970 ret += tcrypt_test("rfc4543(gcm(aes))");
1971 break;
1972
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001973 case 153:
1974 ret += tcrypt_test("cmac(aes)");
1975 break;
1976
1977 case 154:
1978 ret += tcrypt_test("cmac(des3_ede)");
1979 break;
1980
Horia Geantabbf9c892013-11-28 15:11:16 +02001981 case 155:
1982 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1983 break;
1984
Horia Geantabca4feb2014-03-14 17:46:51 +02001985 case 156:
1986 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1987 break;
1988
1989 case 157:
1990 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1991 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05301992 case 181:
1993 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
1994 break;
1995 case 182:
1996 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
1997 break;
1998 case 183:
1999 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2000 break;
2001 case 184:
2002 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2003 break;
2004 case 185:
2005 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2006 break;
2007 case 186:
2008 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2009 break;
2010 case 187:
2011 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2012 break;
2013 case 188:
2014 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2015 break;
2016 case 189:
2017 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2018 break;
2019 case 190:
2020 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2021 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002022 case 191:
2023 ret += tcrypt_test("ecb(sm4)");
2024 break;
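	/*
	 * Speed-test modes start here.  The speed_template_* identifiers
	 * passed below encode in their names the key sizes, in bytes, that
	 * each run cycles through (e.g. speed_template_16_24_32 covers
	 * 128-, 192- and 256-bit keys).
	 */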
Harald Welteebfd9bc2005-06-22 13:27:23 -07002025 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002026 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002027 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002028 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002029 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002030 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002031 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002032 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002033 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002034 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002035 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002036 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002037 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002038 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002039 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002040 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002041 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002042 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2043 speed_template_16_24_32);
2044 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2045 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002046 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2047 speed_template_16_24_32);
2048 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2049 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002050 break;
2051
2052 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002053 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002054 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002055 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002056 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002057 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002058 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002059 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002060 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002061 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002062 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002063 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002064 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002065 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2066 des3_speed_template, DES3_SPEED_VECTORS,
2067 speed_template_24);
2068 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2069 des3_speed_template, DES3_SPEED_VECTORS,
2070 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002071 break;
2072
2073 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002074 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002075 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002076 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002077 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002078 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002079 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002080 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002081 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002082 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2083 speed_template_16_24_32);
2084 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2085 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002086 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2087 speed_template_32_40_48);
2088 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2089 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002090 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2091 speed_template_32_48_64);
2092 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2093 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002094 break;
2095
2096 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002097 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002098 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002099 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002100 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002101 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002102 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002103 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002104 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002105 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2106 speed_template_8_32);
2107 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2108 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002109 break;
2110
2111 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002112 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002113 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002114 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002115 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002116 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002117 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002118 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002119 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002120 break;
2121
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002122 case 205:
2123 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002124 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002125 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002126 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002127 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002128 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002129 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002130 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002131 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2132 speed_template_16_24_32);
2133 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2134 speed_template_16_24_32);
2135 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2136 speed_template_32_40_48);
2137 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2138 speed_template_32_40_48);
2139 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2140 speed_template_32_48_64);
2141 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2142 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002143 break;
2144
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002145 case 206:
2146 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002147 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002148 break;
2149
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002150 case 207:
2151 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2152 speed_template_16_32);
2153 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2154 speed_template_16_32);
2155 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2156 speed_template_16_32);
2157 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2158 speed_template_16_32);
2159 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2160 speed_template_16_32);
2161 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2162 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002163 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2164 speed_template_32_48);
2165 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2166 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002167 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2168 speed_template_32_64);
2169 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2170 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002171 break;
2172
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002173 case 208:
2174 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2175 speed_template_8);
2176 break;
2177
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002178 case 209:
2179 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2180 speed_template_8_16);
2181 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2182 speed_template_8_16);
2183 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2184 speed_template_8_16);
2185 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2186 speed_template_8_16);
2187 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2188 speed_template_8_16);
2189 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2190 speed_template_8_16);
2191 break;
2192
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002193 case 210:
2194 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2195 speed_template_16_32);
2196 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2197 speed_template_16_32);
2198 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2199 speed_template_16_32);
2200 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2201 speed_template_16_32);
2202 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2203 speed_template_16_32);
2204 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2205 speed_template_16_32);
2206 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2207 speed_template_32_48);
2208 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2209 speed_template_32_48);
2210 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2211 speed_template_32_64);
2212 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2213 speed_template_32_64);
2214 break;
2215
Tim Chen53f52d72013-12-11 14:28:47 -08002216 case 211:
2217 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002218 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302219 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002220 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002221 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2222 NULL, 0, 16, 16, aead_speed_template_20);
2223 test_aead_speed("gcm(aes)", DECRYPT, sec,
2224 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002225 break;
2226
Herbert Xu4e4aab62015-06-17 14:04:21 +08002227 case 212:
2228 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002229 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002230 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2231 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002232 break;
2233
Martin Willi2dce0632015-07-16 19:13:59 +02002234 case 213:
2235 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2236 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002237 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2238 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002239 break;
2240
2241 case 214:
2242 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2243 speed_template_32);
2244 break;
2245
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002246 case 215:
2247 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2248 0, 16, 16, aead_speed_template_20, num_mb);
2249 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2250 speed_template_16_24_32, num_mb);
2251 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2252 0, 16, 16, aead_speed_template_20, num_mb);
2253 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2254 speed_template_16_24_32, num_mb);
2255 break;
2256
2257 case 216:
2258 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2259 16, 16, aead_speed_template_19, num_mb);
2260 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2261 16, 16, aead_speed_template_19, num_mb);
2262 break;
2263
2264 case 217:
2265 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2266 sec, NULL, 0, 16, 8, aead_speed_template_36,
2267 num_mb);
2268 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2269 sec, NULL, 0, 16, 8, aead_speed_template_36,
2270 num_mb);
2271 break;
2272
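	/*
	 * The 300-399 hash speed cases are deliberately chained: each one
	 * breaks out only when a specific mode inside that range was asked
	 * for, so mode=300 falls through and benchmarks every hash below,
	 * while e.g. mode=303 runs just the sha1 measurement.  mode=300
	 * together with alg= benchmarks one arbitrary hash instead.
	 */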
Michal Ludvige8057922006-05-30 22:04:19 +10002273 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002274 if (alg) {
2275 test_hash_speed(alg, sec, generic_hash_speed_template);
2276 break;
2277 }
Michal Ludvige8057922006-05-30 22:04:19 +10002278 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002279 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002280 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002281 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002282 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002283 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002284 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002285 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002286 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002287 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002288 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002289 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002290 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002291 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002292 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002293 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002294 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002295 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002296 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002297 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002298 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002299 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002300 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002301 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002302 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002303 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002304 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002305 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002306 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002307 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002308 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002309 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002310 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002311 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002312 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002313 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002314 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002315 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002316 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002317 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002318 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002319 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002320 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002321 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002322 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002323 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002324 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002325 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002326 /* fall through */
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08002327 case 313:
2328 test_hash_speed("sha224", sec, generic_hash_speed_template);
2329 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002330 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002331 case 314:
2332 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2333 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002334 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002335 case 315:
2336 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2337 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002338 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002339 case 316:
2340 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2341 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002342 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002343 case 317:
2344 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2345 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002346 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08002347 case 318:
2348 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
2349 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002350 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07002351 case 319:
2352 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2353 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002354 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10002355 case 320:
2356 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2357 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002358 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02002359 case 321:
2360 test_hash_speed("poly1305", sec, poly1305_speed_template);
2361 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002362 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302363 case 322:
2364 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2365 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002366 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302367 case 323:
2368 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2369 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002370 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302371 case 324:
2372 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2373 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002374 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302375 case 325:
2376 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2377 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002378 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002379 case 326:
2380 test_hash_speed("sm3", sec, generic_hash_speed_template);
2381 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002382 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002383 case 399:
2384 break;
2385
David S. Millerbeb63da2010-05-19 14:11:21 +10002386 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002387 if (alg) {
2388 test_ahash_speed(alg, sec, generic_hash_speed_template);
2389 break;
2390 }
David S. Millerbeb63da2010-05-19 14:11:21 +10002391 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002392 case 401:
2393 test_ahash_speed("md4", sec, generic_hash_speed_template);
2394 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002395 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002396 case 402:
2397 test_ahash_speed("md5", sec, generic_hash_speed_template);
2398 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002399 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002400 case 403:
2401 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2402 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002403 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002404 case 404:
2405 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2406 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002407 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002408 case 405:
2409 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2410 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002411 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002412 case 406:
2413 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2414 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002415 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002416 case 407:
2417 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2418 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002419 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002420 case 408:
2421 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2422 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002423 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002424 case 409:
2425 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2426 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002427 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002428 case 410:
2429 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2430 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002431 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002432 case 411:
2433 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2434 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002435 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002436 case 412:
2437 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2438 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002439 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002440 case 413:
2441 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2442 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002443 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002444 case 414:
2445 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2446 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002447 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002448 case 415:
2449 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2450 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002451 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002452 case 416:
2453 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2454 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002455 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002456 case 417:
2457 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2458 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002459 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302460 case 418:
2461 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2462 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002463 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302464 case 419:
2465 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2466 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002467 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302468 case 420:
2469 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2470 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002471 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302472 case 421:
2473 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2474 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002475 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002476 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002477 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2478 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002479 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002480 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002481 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002482 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2483 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002484 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002485 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07002486 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002487 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2488 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002489 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002490 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002491 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002492 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2493 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002494 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002495 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002496 case 499:
2497 break;
2498
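	/*
	 * The 500-509 cases repeat the cipher benchmarks through
	 * test_acipher_speed(), i.e. the asynchronous entry point defined
	 * above, which is the path request-based hardware offload drivers
	 * normally exercise.
	 */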
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002499 case 500:
2500 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2501 speed_template_16_24_32);
2502 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2503 speed_template_16_24_32);
2504 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2505 speed_template_16_24_32);
2506 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2507 speed_template_16_24_32);
2508 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2509 speed_template_32_40_48);
2510 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2511 speed_template_32_40_48);
2512 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002513 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002514 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002515 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002516 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2517 speed_template_16_24_32);
2518 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2519 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002520 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2521 speed_template_16_24_32);
2522 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2523 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002524 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2525 speed_template_16_24_32);
2526 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2527 speed_template_16_24_32);
2528 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2529 speed_template_16_24_32);
2530 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2531 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002532 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2533 speed_template_20_28_36);
2534 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2535 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002536 break;
2537
2538 case 501:
2539 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2540 des3_speed_template, DES3_SPEED_VECTORS,
2541 speed_template_24);
2542 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2543 des3_speed_template, DES3_SPEED_VECTORS,
2544 speed_template_24);
2545 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2546 des3_speed_template, DES3_SPEED_VECTORS,
2547 speed_template_24);
2548 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2549 des3_speed_template, DES3_SPEED_VECTORS,
2550 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002551 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2552 des3_speed_template, DES3_SPEED_VECTORS,
2553 speed_template_24);
2554 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2555 des3_speed_template, DES3_SPEED_VECTORS,
2556 speed_template_24);
2557 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2558 des3_speed_template, DES3_SPEED_VECTORS,
2559 speed_template_24);
2560 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2561 des3_speed_template, DES3_SPEED_VECTORS,
2562 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002563 break;
2564
2565 case 502:
2566 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2567 speed_template_8);
2568 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2569 speed_template_8);
2570 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2571 speed_template_8);
2572 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2573 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002574 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2575 speed_template_8);
2576 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2577 speed_template_8);
2578 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2579 speed_template_8);
2580 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2581 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002582 break;
2583
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002584 case 503:
2585 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2586 speed_template_16_32);
2587 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2588 speed_template_16_32);
2589 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2590 speed_template_16_32);
2591 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2592 speed_template_16_32);
2593 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2594 speed_template_16_32);
2595 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2596 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002597 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2598 speed_template_32_48);
2599 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2600 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002601 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2602 speed_template_32_64);
2603 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2604 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002605 break;
2606
Johannes Goetzfried107778b2012-05-28 15:54:24 +02002607 case 504:
2608 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2609 speed_template_16_24_32);
2610 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2611 speed_template_16_24_32);
2612 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2613 speed_template_16_24_32);
2614 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2615 speed_template_16_24_32);
2616 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2617 speed_template_16_24_32);
2618 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2619 speed_template_16_24_32);
2620 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2621 speed_template_32_40_48);
2622 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2623 speed_template_32_40_48);
2624 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2625 speed_template_32_48_64);
2626 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2627 speed_template_32_48_64);
2628 break;
2629
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002630 case 505:
2631 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2632 speed_template_8);
2633 break;
2634
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002635 case 506:
2636 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2637 speed_template_8_16);
2638 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2639 speed_template_8_16);
2640 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2641 speed_template_8_16);
2642 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2643 speed_template_8_16);
2644 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2645 speed_template_8_16);
2646 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2647 speed_template_8_16);
2648 break;
2649
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002650 case 507:
2651 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2652 speed_template_16_32);
2653 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2654 speed_template_16_32);
2655 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2656 speed_template_16_32);
2657 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2658 speed_template_16_32);
2659 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2660 speed_template_16_32);
2661 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2662 speed_template_16_32);
2663 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2664 speed_template_32_48);
2665 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2666 speed_template_32_48);
2667 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2668 speed_template_32_64);
2669 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2670 speed_template_32_64);
2671 break;
2672
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002673 case 508:
2674 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2675 speed_template_16_32);
2676 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2677 speed_template_16_32);
2678 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2679 speed_template_16_32);
2680 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2681 speed_template_16_32);
2682 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2683 speed_template_16_32);
2684 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2685 speed_template_16_32);
2686 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2687 speed_template_32_48);
2688 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2689 speed_template_32_48);
2690 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2691 speed_template_32_64);
2692 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2693 speed_template_32_64);
2694 break;
2695
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002696 case 509:
2697 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2698 speed_template_8_32);
2699 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2700 speed_template_8_32);
2701 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2702 speed_template_8_32);
2703 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2704 speed_template_8_32);
2705 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2706 speed_template_8_32);
2707 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2708 speed_template_8_32);
2709 break;
2710
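	/*
	 * The 600-series cases use test_mb_skcipher_speed(), which submits
	 * num_mb requests at a time to measure multi-buffer / parallel
	 * throughput; num_mb is the extra argument threaded through
	 * do_test() for exactly this purpose.
	 */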
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002711 case 600:
2712 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2713 speed_template_16_24_32, num_mb);
2714 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2715 speed_template_16_24_32, num_mb);
2716 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2717 speed_template_16_24_32, num_mb);
2718 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2719 speed_template_16_24_32, num_mb);
2720 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2721 speed_template_32_40_48, num_mb);
2722 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2723 speed_template_32_40_48, num_mb);
2724 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2725 speed_template_32_64, num_mb);
2726 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2727 speed_template_32_64, num_mb);
2728 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2729 speed_template_16_24_32, num_mb);
2730 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2731 speed_template_16_24_32, num_mb);
2732 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2733 speed_template_16_24_32, num_mb);
2734 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2735 speed_template_16_24_32, num_mb);
2736 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2737 speed_template_16_24_32, num_mb);
2738 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2739 speed_template_16_24_32, num_mb);
2740 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2741 speed_template_16_24_32, num_mb);
2742 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2743 speed_template_16_24_32, num_mb);
2744 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2745 0, speed_template_20_28_36, num_mb);
2746 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2747 0, speed_template_20_28_36, num_mb);
2748 break;
2749
        case 601:
                test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
                                des3_speed_template, DES3_SPEED_VECTORS,
                                speed_template_24, num_mb);
                break;

        case 602:
                test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                break;

        case 603:
                test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_48, num_mb);
                test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
                                speed_template_32_48, num_mb);
                test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_64, num_mb);
                test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
                                speed_template_32_64, num_mb);
                break;

        case 604:
                test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_24_32, num_mb);
                test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
                                speed_template_16_24_32, num_mb);
                test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_24_32, num_mb);
                test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
                                speed_template_16_24_32, num_mb);
                test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_24_32, num_mb);
                test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
                                speed_template_16_24_32, num_mb);
                test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_40_48, num_mb);
                test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
                                speed_template_32_40_48, num_mb);
                test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_48_64, num_mb);
                test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
                                speed_template_32_48_64, num_mb);
                break;

        case 605:
                test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
                                speed_template_8, num_mb);
                break;

        case 606:
                test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
                                speed_template_8_16, num_mb);
                test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
                                speed_template_8_16, num_mb);
                test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
                                speed_template_8_16, num_mb);
                test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
                                speed_template_8_16, num_mb);
                test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
                                speed_template_8_16, num_mb);
                test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
                                speed_template_8_16, num_mb);
                break;

        case 607:
                test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_48, num_mb);
                test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
                                speed_template_32_48, num_mb);
                test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_64, num_mb);
                test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
                                speed_template_32_64, num_mb);
                break;

        case 608:
                test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
                                speed_template_16_32, num_mb);
                test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_48, num_mb);
                test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
                                speed_template_32_48, num_mb);
                test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
                                speed_template_32_64, num_mb);
                test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
                                speed_template_32_64, num_mb);
                break;

        case 609:
                test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
                                speed_template_8_32, num_mb);
                test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
                                speed_template_8_32, num_mb);
                test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
                                speed_template_8_32, num_mb);
                test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
                                speed_template_8_32, num_mb);
                test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
                                speed_template_8_32, num_mb);
                test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
                                speed_template_8_32, num_mb);
                break;

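        /*
         * Mode 1000 simply reports which algorithms from a built-in
         * list are available.
         */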
        case 1000:
                test_available();
                break;
        }

        return ret;
}

static int __init tcrypt_mod_init(void)
{
        int err = -ENOMEM;
        int i;

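        /* Allocate the TVMEMSIZE scratch pages used as test buffers. */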
        for (i = 0; i < TVMEMSIZE; i++) {
                tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
                if (!tvmem[i])
                        goto err_free_tv;
        }

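        /* Run whichever test(s) the module parameters selected. */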
        err = do_test(alg, type, mask, mode, num_mb);

        if (err) {
                printk(KERN_ERR "tcrypt: one or more tests failed!\n");
                goto err_free_tv;
        } else {
                pr_debug("all tests passed\n");
        }

        /* We intentionally return -EAGAIN to prevent the module from
         * staying loaded, unless we're running in fips mode. It does all
         * its work from init() and doesn't offer any runtime
         * functionality, but in the fips case, checking for a successful
         * load is helpful.
         * => we don't need it in the memory, do we?
         * -- mludvig
         */
        if (!fips_enabled)
                err = -EAGAIN;

err_free_tv:
        for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
                free_page((unsigned long)tvmem[i]);

        return err;
}

/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);

module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
                      "(defaults to zero which uses CPU cycles instead)");
module_param(num_mb, uint, 0000);
MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");

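/*
 * Example usage (a sketch; the mode numbers are defined by the switch
 * in do_test() above):
 *
 *   modprobe tcrypt mode=600 num_mb=16 sec=1
 *
 * runs the multi-buffer AES skcipher speed tests for one second per
 * measurement with 16 concurrent requests.  Because of the intentional
 * -EAGAIN return above, modprobe reports a failure ("Resource
 * temporarily unavailable") even when every test passes; the results
 * themselves are written to the kernel log.
 */
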
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");