/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", NULL
};

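/*
 * Wall-clock benchmark helper: encrypt or decrypt the same scatterlist
 * repeatedly for roughly "secs" seconds and report the operation count.
 */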
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
			       struct scatterlist *sg, int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

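/*
 * Cycle-accurate benchmark helper: with interrupts disabled, run four
 * warm-up operations and then average the cycle counts of eight timed runs.
 */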
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
			      struct scatterlist *sg, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

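/*
 * AEAD counterparts of the blkcipher timing helpers above: drive
 * crypto_aead_encrypt()/crypto_aead_decrypt() on a prebuilt request.
 */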
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_aead_encrypt(req);
		else
			ret = crypto_aead_decrypt(req);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_aead_encrypt(req);
		else
			ret = crypto_aead_decrypt(req);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_aead_encrypt(req);
		else
			ret = crypto_aead_decrypt(req);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

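/*
 * Allocate/free the XBUFSIZE single-page buffers used as scatterlist
 * backing storage for the AEAD speed tests.
 */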
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

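/*
 * Map "buflen" bytes of the page array onto a scatterlist, one page per
 * entry; the last entry carries the remainder (capped at XBUFSIZE pages).
 */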
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np);
	np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k], xbuf[k], PAGE_SIZE);

	sg_set_buf(&sg[k], xbuf[k], rem);
}

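/*
 * Speed-test one AEAD algorithm: for every key size in "keysize" and every
 * length in aead_sizes[], set up the request and run either the jiffies- or
 * the cycles-based measurement (secs == 0 selects cycle counting).
 */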
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *asg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 8 * 3, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	asg = &sg[8];
	sgout = &asg[8];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);
			sg_init_one(&asg[0], assoc, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(&sg[0], xbuf,
				     *b_size + (enc ? authsize : 0));

			sg_init_aead(&sgout[0], xoutbuf,
				     *b_size + (enc ? authsize : 0));

			aead_request_set_crypt(req, sg, sgout, *b_size, iv);
			aead_request_set_assoc(req, asg, aad_size);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
	return;
}

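/*
 * Synchronous blkcipher speed test: walks the keysize/block_sizes matrix
 * and feeds each combination to test_cipher_jiffies() or
 * test_cipher_cycles().
 */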
static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, iv_len;
	const char *key;
	char iv[128];
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}
	desc.tfm = tfm;
	desc.flags = 0;

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_blkcipher, tfm), e);

	i = 0;
	do {

		b_size = block_sizes;
		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				printk("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			printk("test %u (%d bit key, %d byte blocks): ", i,
			       *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			ret = crypto_blkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				printk("setkey() failed flags=%x\n",
				       crypto_blkcipher_get_flags(tfm));
				goto out;
			}

			sg_init_table(sg, TVMEMSIZE);
			sg_set_buf(sg, tvmem[0] + *keysize,
				   PAGE_SIZE - *keysize);
			for (j = 1; j < TVMEMSIZE; j++) {
				sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
				memset(tvmem[j], 0xff, PAGE_SIZE);
			}

			iv_len = crypto_blkcipher_ivsize(tfm);
			if (iv_len) {
				memset(&iv, 0xff, iv_len);
				crypto_blkcipher_set_iv(tfm, iv, iv_len);
			}

			if (secs)
				ret = test_cipher_jiffies(&desc, enc, sg,
							  *b_size, secs);
			else
				ret = test_cipher_cycles(&desc, enc, sg,
							 *b_size);

			if (ret) {
				printk("%s() failed flags=%x\n", e, desc.flags);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	crypto_free_blkcipher(tfm);
}

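/*
 * Hash timing helpers: the *_digest variants hash the whole buffer in one
 * crypto_hash_digest() call, the others split it into "plen"-byte updates.
 */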
static int test_hash_jiffies_digest(struct hash_desc *desc,
				    struct scatterlist *sg, int blen,
				    char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
			     int blen, int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_hash_jiffies_digest(desc, sg, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = crypto_hash_init(desc);
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = crypto_hash_final(desc, out);
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_hash_cycles_digest(struct hash_desc *desc,
				   struct scatterlist *sg, int blen, char *out)
{
	unsigned long cycles = 0;
	int i;
	int ret;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
			    int blen, int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount;
	int ret;

	if (plen == blen)
		return test_hash_cycles_digest(desc, sg, blen, out);

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

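/*
 * Speed-test a synchronous hash over the block/update sizes given in the
 * hash_speed table, keying the transform first when speed[i].klen is set.
 */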
static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_hash *tfm;
	struct hash_desc desc;
	static char output[1024];
	int i;
	int ret;

	tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of %s (%s)\n", algo,
	       get_driver_name(crypto_hash, tfm));

	desc.tfm = tfm;
	desc.flags = 0;

	if (crypto_hash_digestsize(tfm) > sizeof(output)) {
		printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
		       crypto_hash_digestsize(tfm), sizeof(output));
		goto out;
	}

	test_hash_sg_init(sg);
	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			printk(KERN_ERR
			       "template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);

		printk(KERN_INFO "test%3u "
		       "(%5u byte blocks,%5u bytes per update,%4u updates): ",
		       i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		if (secs)
			ret = test_hash_jiffies(&desc, sg, speed[i].blen,
						speed[i].plen, output, secs);
		else
			ret = test_hash_cycles(&desc, sg, speed[i].blen,
					       speed[i].plen, output);

		if (ret) {
			printk(KERN_ERR "hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	crypto_free_hash(tfm);
}

struct tcrypt_result {
	struct completion completion;
	int err;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

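/*
 * For async requests the operation may return -EINPROGRESS or -EBUSY;
 * wait for tcrypt_complete() to fire and pick up the real result.
 */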
static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

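/* Asynchronous (ahash) variant of test_hash_speed(). */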
static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	struct scatterlist sg[TVMEMSIZE];
	struct tcrypt_result tresult;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	static char output[1024];
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
		pr_err("digestsize(%u) > outputbuffer(%zu)\n",
		       crypto_ahash_digestsize(tfm), sizeof(output));
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	init_completion(&tresult.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}

	return ret;
}

static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

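/*
 * Asynchronous (ablkcipher) variant of test_cipher_speed(); completions
 * are handled through do_one_acipher_op().
 */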
static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, k, iv_len;
	struct tcrypt_result tresult;
	const char *key;
	char iv[128];
	struct ablkcipher_request *req;
	struct crypto_ablkcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&tresult.completion);

	tfm = crypto_alloc_ablkcipher(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_ablkcipher, tfm), e);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &tresult);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_ablkcipher_clear_flags(tfm, ~0);

			ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_ablkcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_ablkcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	ablkcipher_request_free(req);
out:
	crypto_free_ablkcipher(tfm);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

static inline int tcrypt_test(const char *alg)
{
	int ret;

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

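/*
 * Dispatch on the "mode" module parameter: mode 0 runs every case from 1
 * to 199, low numbers map to single-algorithm correctness tests and the
 * 200+ range to the speed tests.
 */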
Herbert Xu86068132014-12-04 16:43:29 +08001232static int do_test(const char *alg, u32 type, u32 mask, int m)
Herbert Xu01b32322008-07-31 15:41:55 +08001233{
1234 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001235 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001236
1237 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001238 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001239 if (alg) {
1240 if (!crypto_has_alg(alg, type,
1241 mask ?: CRYPTO_ALG_TYPE_MASK))
1242 ret = -ENOENT;
1243 break;
1244 }
1245
Herbert Xu01b32322008-07-31 15:41:55 +08001246 for (i = 1; i < 200; i++)
Herbert Xu86068132014-12-04 16:43:29 +08001247 ret += do_test(NULL, 0, 0, i);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001248 break;
1249
1250 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001251 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001252 break;
1253
1254 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001255 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001256 break;
1257
1258 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001259 ret += tcrypt_test("ecb(des)");
1260 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001261 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001262 break;
1263
1264 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001265 ret += tcrypt_test("ecb(des3_ede)");
1266 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001267 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001268 break;
1269
1270 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001271 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001272 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001273
Linus Torvalds1da177e2005-04-16 15:20:36 -07001274 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001275 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001276 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001277
Linus Torvalds1da177e2005-04-16 15:20:36 -07001278 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001279 ret += tcrypt_test("ecb(blowfish)");
1280 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001281 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001282 break;
1283
1284 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001285 ret += tcrypt_test("ecb(twofish)");
1286 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001287 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001288 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001289 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001290 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001291
Linus Torvalds1da177e2005-04-16 15:20:36 -07001292 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001293 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001294 ret += tcrypt_test("cbc(serpent)");
1295 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001296 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001297 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001298 break;
1299
1300 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001301 ret += tcrypt_test("ecb(aes)");
1302 ret += tcrypt_test("cbc(aes)");
1303 ret += tcrypt_test("lrw(aes)");
1304 ret += tcrypt_test("xts(aes)");
1305 ret += tcrypt_test("ctr(aes)");
1306 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001307 break;
1308
1309 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001310 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001311 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001312
Linus Torvalds1da177e2005-04-16 15:20:36 -07001313 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001314 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001315 break;
1316
1317 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001318 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001319 break;
1320
1321 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001322 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001323 ret += tcrypt_test("cbc(cast5)");
1324 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001325 break;
1326
1327 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001328 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001329 ret += tcrypt_test("cbc(cast6)");
1330 ret += tcrypt_test("ctr(cast6)");
1331 ret += tcrypt_test("lrw(cast6)");
1332 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001333 break;
1334
1335 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001336 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001337 break;
1338
1339 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001340 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001341 break;
1342
1343 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001344 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001345 break;
1346
1347 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001348 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001349 break;
1350
1351 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001352 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001353 break;
1354
1355 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001356 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001357 break;
1358
1359 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001360 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001361 break;
1362
1363 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001364 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001365 break;
1366
1367 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001368 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001369 break;
1370
1371 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001372 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001373 break;
1374
1375 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001376 ret += tcrypt_test("ecb(anubis)");
1377 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001378 break;
1379
1380 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001381 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001382 break;
1383
1384 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001385 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001386 break;
1387
1388 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001389 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001390 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001391
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001392 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001393 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001394 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001395
David Howells90831632006-12-16 12:13:14 +11001396 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001397 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001398 break;
1399
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001400 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001401 ret += tcrypt_test("ecb(camellia)");
1402 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001403 ret += tcrypt_test("ctr(camellia)");
1404 ret += tcrypt_test("lrw(camellia)");
1405 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001406 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001407
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001408 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001409 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001410 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001411
Tan Swee Heng2407d602007-11-23 19:45:00 +08001412 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001413 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001414 break;
1415
Herbert Xu8df213d2007-12-02 14:55:47 +11001416 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001417 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001418 break;
1419
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001420 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001421 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001422 break;
1423
Joy Latten93cc74e2007-12-12 20:24:22 +08001424 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001425 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001426 break;
1427
Kevin Coffman76cb9522008-03-24 21:26:16 +08001428 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001429 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001430 break;
1431
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001432 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001433 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001434 break;
1435
1436 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001437 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001438 break;
1439
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001440 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001441 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001442 break;
1443
1444 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001445 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001446 break;
1447
1448 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001449 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001450 break;
1451
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001452 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001453 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001454 break;
1455
Jarod Wilson5d667322009-05-04 19:23:40 +08001456 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001457 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001458 break;
1459
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001460 case 46:
1461 ret += tcrypt_test("ghash");
1462 break;
1463
Herbert Xu684115212013-09-07 12:56:26 +10001464 case 47:
1465 ret += tcrypt_test("crct10dif");
1466 break;
1467
Linus Torvalds1da177e2005-04-16 15:20:36 -07001468 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001469 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001470 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001471
Linus Torvalds1da177e2005-04-16 15:20:36 -07001472 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001473 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001474 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001475
Linus Torvalds1da177e2005-04-16 15:20:36 -07001476 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001477 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001478 break;
1479
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001480 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001481 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001482 break;
1483
1484 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001485 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001486 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001487
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001488 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001489 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001490 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001491
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001492 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001493 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001494 break;
1495
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001496 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001497 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001498 break;
1499
1500 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001501 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001502 break;
1503
Shane Wangf1939f72009-09-02 20:05:22 +10001504 case 109:
1505 ret += tcrypt_test("vmac(aes)");
1506 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001507
Sonic Zhanga482b082012-05-25 17:54:13 +08001508 case 110:
1509 ret += tcrypt_test("hmac(crc32)");
1510 break;
Shane Wangf1939f72009-09-02 20:05:22 +10001511
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001512 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001513 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001514 break;
1515
Adrian Hoban69435b92010-11-04 15:02:04 -04001516 case 151:
1517 ret += tcrypt_test("rfc4106(gcm(aes))");
1518 break;
1519
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001520 case 152:
1521 ret += tcrypt_test("rfc4543(gcm(aes))");
1522 break;
1523
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001524 case 153:
1525 ret += tcrypt_test("cmac(aes)");
1526 break;
1527
1528 case 154:
1529 ret += tcrypt_test("cmac(des3_ede)");
1530 break;
1531
Horia Geantabbf9c892013-11-28 15:11:16 +02001532 case 155:
1533 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1534 break;
1535
Horia Geantabca4feb2014-03-14 17:46:51 +02001536 case 156:
1537 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1538 break;
1539
1540 case 157:
1541 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1542 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05301543 case 181:
1544 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
1545 break;
1546 case 182:
1547 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
1548 break;
1549 case 183:
1550 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
1551 break;
1552 case 184:
1553 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
1554 break;
1555 case 185:
1556 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
1557 break;
1558 case 186:
1559 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
1560 break;
1561 case 187:
1562 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
1563 break;
1564 case 188:
1565 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
1566 break;
1567 case 189:
1568 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
1569 break;
1570 case 190:
1571 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
1572 break;
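	/* 200-211: cipher and AEAD speed tests */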
Harald Welteebfd9bc2005-06-22 13:27:23 -07001573 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10001574 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001575 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001576 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001577 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001578 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001579 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001580 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001581 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11001582 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001583 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11001584 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001585 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08001586 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001587 speed_template_32_48_64);
Rik Snelf19f5112007-09-19 20:23:13 +08001588 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001589 speed_template_32_48_64);
Jan Glauber9996e342011-04-26 16:34:01 +10001590 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1591 speed_template_16_24_32);
1592 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1593 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001594 break;
1595
1596 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10001597 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001598 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001599 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001600 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001601 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001602 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001603 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001604 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001605 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001606 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001607 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001608 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03001609 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
1610 des3_speed_template, DES3_SPEED_VECTORS,
1611 speed_template_24);
1612 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
1613 des3_speed_template, DES3_SPEED_VECTORS,
1614 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001615 break;
1616
1617 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10001618 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001619 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001620 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001621 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001622 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001623 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001624 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001625 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03001626 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1627 speed_template_16_24_32);
1628 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1629 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001630 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1631 speed_template_32_40_48);
1632 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1633 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001634 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1635 speed_template_32_48_64);
1636 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1637 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001638 break;
1639
1640 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10001641 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001642 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001643 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001644 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001645 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001646 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001647 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001648 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03001649 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1650 speed_template_8_32);
1651 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1652 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001653 break;
1654
1655 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10001656 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001657 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001658 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001659 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001660 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001661 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001662 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001663 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001664 break;
1665
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001666 case 205:
1667 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001668 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001669 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001670 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001671 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001672 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001673 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001674 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02001675 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1676 speed_template_16_24_32);
1677 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1678 speed_template_16_24_32);
1679 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1680 speed_template_32_40_48);
1681 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1682 speed_template_32_40_48);
1683 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1684 speed_template_32_48_64);
1685 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1686 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001687 break;
1688
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001689 case 206:
1690 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001691 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001692 break;
1693
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001694 case 207:
1695 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1696 speed_template_16_32);
1697 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1698 speed_template_16_32);
1699 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1700 speed_template_16_32);
1701 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1702 speed_template_16_32);
1703 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1704 speed_template_16_32);
1705 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1706 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001707 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1708 speed_template_32_48);
1709 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1710 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001711 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1712 speed_template_32_64);
1713 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1714 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001715 break;
1716
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001717 case 208:
1718 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1719 speed_template_8);
1720 break;
1721
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001722 case 209:
1723 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1724 speed_template_8_16);
1725 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1726 speed_template_8_16);
1727 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1728 speed_template_8_16);
1729 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1730 speed_template_8_16);
1731 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1732 speed_template_8_16);
1733 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1734 speed_template_8_16);
1735 break;
1736
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001737 case 210:
1738 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1739 speed_template_16_32);
1740 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1741 speed_template_16_32);
1742 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1743 speed_template_16_32);
1744 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1745 speed_template_16_32);
1746 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1747 speed_template_16_32);
1748 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1749 speed_template_16_32);
1750 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1751 speed_template_32_48);
1752 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1753 speed_template_32_48);
1754 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1755 speed_template_32_64);
1756 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1757 speed_template_32_64);
1758 break;
1759
Tim Chen53f52d72013-12-11 14:28:47 -08001760 case 211:
1761 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
1762 NULL, 0, 16, 8, aead_speed_template_20);
1763 break;
1764
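	/*
	 * 300-320: hash speed tests.  mode=300 without alg= falls through
	 * every case below; a specific mode (e.g. 303) stops after its own
	 * test via the "mode > 300 && mode < 400" check.
	 */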
Michal Ludvige8057922006-05-30 22:04:19 +10001765 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08001766 if (alg) {
1767 test_hash_speed(alg, sec, generic_hash_speed_template);
1768 break;
1769 }
1770
Michal Ludvige8057922006-05-30 22:04:19 +10001771 /* fall through */
1772
1773 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10001774 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001775 if (mode > 300 && mode < 400) break;
1776
1777 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10001778 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001779 if (mode > 300 && mode < 400) break;
1780
1781 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10001782 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001783 if (mode > 300 && mode < 400) break;
1784
1785 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10001786 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001787 if (mode > 300 && mode < 400) break;
1788
1789 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10001790 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001791 if (mode > 300 && mode < 400) break;
1792
1793 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10001794 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001795 if (mode > 300 && mode < 400) break;
1796
1797 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10001798 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001799 if (mode > 300 && mode < 400) break;
1800
1801 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10001802 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001803 if (mode > 300 && mode < 400) break;
1804
1805 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10001806 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001807 if (mode > 300 && mode < 400) break;
1808
1809 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10001810 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001811 if (mode > 300 && mode < 400) break;
1812
1813 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10001814 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001815 if (mode > 300 && mode < 400) break;
1816
1817 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10001818 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001819 if (mode > 300 && mode < 400) break;
1820
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001821 case 313:
1822 test_hash_speed("sha224", sec, generic_hash_speed_template);
1823 if (mode > 300 && mode < 400) break;
1824
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001825 case 314:
1826 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1827 if (mode > 300 && mode < 400) break;
1828
1829 case 315:
1830 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1831 if (mode > 300 && mode < 400) break;
1832
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001833 case 316:
1834 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1835 if (mode > 300 && mode < 400) break;
1836
1837 case 317:
1838 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1839 if (mode > 300 && mode < 400) break;
1840
Huang Ying18bcc912010-03-10 18:30:32 +08001841 case 318:
1842 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1843 if (mode > 300 && mode < 400) break;
1844
Tim Chene3899e42012-09-27 15:44:24 -07001845 case 319:
1846 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1847 if (mode > 300 && mode < 400) break;
1848
Herbert Xu684115212013-09-07 12:56:26 +10001849 case 320:
1850 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1851 if (mode > 300 && mode < 400) break;
1852
Michal Ludvige8057922006-05-30 22:04:19 +10001853 case 399:
1854 break;
1855
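	/* 400-417: ahash speed tests; same fall-through scheme as 300-320 */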
David S. Millerbeb63da2010-05-19 14:11:21 +10001856 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08001857 if (alg) {
1858 test_ahash_speed(alg, sec, generic_hash_speed_template);
1859 break;
1860 }
1861
David S. Millerbeb63da2010-05-19 14:11:21 +10001862 /* fall through */
1863
1864 case 401:
1865 test_ahash_speed("md4", sec, generic_hash_speed_template);
1866 if (mode > 400 && mode < 500) break;
1867
1868 case 402:
1869 test_ahash_speed("md5", sec, generic_hash_speed_template);
1870 if (mode > 400 && mode < 500) break;
1871
1872 case 403:
1873 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1874 if (mode > 400 && mode < 500) break;
1875
1876 case 404:
1877 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1878 if (mode > 400 && mode < 500) break;
1879
1880 case 405:
1881 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1882 if (mode > 400 && mode < 500) break;
1883
1884 case 406:
1885 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1886 if (mode > 400 && mode < 500) break;
1887
1888 case 407:
1889 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1890 if (mode > 400 && mode < 500) break;
1891
1892 case 408:
1893 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1894 if (mode > 400 && mode < 500) break;
1895
1896 case 409:
1897 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1898 if (mode > 400 && mode < 500) break;
1899
1900 case 410:
1901 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1902 if (mode > 400 && mode < 500) break;
1903
1904 case 411:
1905 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1906 if (mode > 400 && mode < 500) break;
1907
1908 case 412:
1909 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1910 if (mode > 400 && mode < 500) break;
1911
1912 case 413:
1913 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1914 if (mode > 400 && mode < 500) break;
1915
1916 case 414:
1917 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1918 if (mode > 400 && mode < 500) break;
1919
1920 case 415:
1921 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1922 if (mode > 400 && mode < 500) break;
1923
1924 case 416:
1925 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1926 if (mode > 400 && mode < 500) break;
1927
1928 case 417:
1929 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1930 if (mode > 400 && mode < 500) break;
1931
1932 case 499:
1933 break;
1934
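	/* 500-509: async cipher speed tests (test_acipher_speed) */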
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001935 case 500:
1936 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1937 speed_template_16_24_32);
1938 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1939 speed_template_16_24_32);
1940 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1941 speed_template_16_24_32);
1942 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1943 speed_template_16_24_32);
1944 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1945 speed_template_32_40_48);
1946 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1947 speed_template_32_40_48);
1948 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1949 speed_template_32_48_64);
1950 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1951 speed_template_32_48_64);
1952 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1953 speed_template_16_24_32);
1954 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1955 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02001956 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1957 speed_template_16_24_32);
1958 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1959 speed_template_16_24_32);
1960 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1961 speed_template_16_24_32);
1962 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1963 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02001964 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1965 speed_template_20_28_36);
1966 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1967 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001968 break;
1969
1970 case 501:
1971 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1972 des3_speed_template, DES3_SPEED_VECTORS,
1973 speed_template_24);
1974 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
1975 des3_speed_template, DES3_SPEED_VECTORS,
1976 speed_template_24);
1977 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1978 des3_speed_template, DES3_SPEED_VECTORS,
1979 speed_template_24);
1980 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
1981 des3_speed_template, DES3_SPEED_VECTORS,
1982 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02001983 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
1984 des3_speed_template, DES3_SPEED_VECTORS,
1985 speed_template_24);
1986 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
1987 des3_speed_template, DES3_SPEED_VECTORS,
1988 speed_template_24);
1989 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
1990 des3_speed_template, DES3_SPEED_VECTORS,
1991 speed_template_24);
1992 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
1993 des3_speed_template, DES3_SPEED_VECTORS,
1994 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001995 break;
1996
1997 case 502:
1998 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1999 speed_template_8);
2000 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2001 speed_template_8);
2002 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2003 speed_template_8);
2004 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2005 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002006 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2007 speed_template_8);
2008 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2009 speed_template_8);
2010 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2011 speed_template_8);
2012 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2013 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002014 break;
2015
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002016 case 503:
2017 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2018 speed_template_16_32);
2019 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2020 speed_template_16_32);
2021 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2022 speed_template_16_32);
2023 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2024 speed_template_16_32);
2025 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2026 speed_template_16_32);
2027 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2028 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002029 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2030 speed_template_32_48);
2031 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2032 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002033 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2034 speed_template_32_64);
2035 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2036 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002037 break;
2038
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002039 case 504:
2040 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2041 speed_template_16_24_32);
2042 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2043 speed_template_16_24_32);
2044 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2045 speed_template_16_24_32);
2046 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2047 speed_template_16_24_32);
2048 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2049 speed_template_16_24_32);
2050 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2051 speed_template_16_24_32);
2052 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2053 speed_template_32_40_48);
2054 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2055 speed_template_32_40_48);
2056 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2057 speed_template_32_48_64);
2058 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2059 speed_template_32_48_64);
2060 break;
2061
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002062 case 505:
2063 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2064 speed_template_8);
2065 break;
2066
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002067 case 506:
2068 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2069 speed_template_8_16);
2070 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2071 speed_template_8_16);
2072 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2073 speed_template_8_16);
2074 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2075 speed_template_8_16);
2076 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2077 speed_template_8_16);
2078 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2079 speed_template_8_16);
2080 break;
2081
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002082 case 507:
2083 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2084 speed_template_16_32);
2085 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2086 speed_template_16_32);
2087 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2088 speed_template_16_32);
2089 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2090 speed_template_16_32);
2091 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2092 speed_template_16_32);
2093 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2094 speed_template_16_32);
2095 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2096 speed_template_32_48);
2097 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2098 speed_template_32_48);
2099 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2100 speed_template_32_64);
2101 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2102 speed_template_32_64);
2103 break;
2104
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002105 case 508:
2106 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2107 speed_template_16_32);
2108 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2109 speed_template_16_32);
2110 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2111 speed_template_16_32);
2112 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2113 speed_template_16_32);
2114 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2115 speed_template_16_32);
2116 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2117 speed_template_16_32);
2118 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2119 speed_template_32_48);
2120 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2121 speed_template_32_48);
2122 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2123 speed_template_32_64);
2124 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2125 speed_template_32_64);
2126 break;
2127
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002128 case 509:
2129 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2130 speed_template_8_32);
2131 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2132 speed_template_8_32);
2133 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2134 speed_template_8_32);
2135 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2136 speed_template_8_32);
2137 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2138 speed_template_8_32);
2139 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2140 speed_template_8_32);
2141 break;
2142
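	/* 1000: report which algorithms are available on this kernel */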
Linus Torvalds1da177e2005-04-16 15:20:36 -07002143 case 1000:
2144 test_available();
2145 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002146 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10002147
2148 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002149}
2150
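/* Allocate TVMEMSIZE scratch pages, run the selected tests, then free them. */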
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002151static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07002152{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002153 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08002154 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002155
Herbert Xuf139cfa2008-07-31 12:23:53 +08002156 for (i = 0; i < TVMEMSIZE; i++) {
2157 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2158 if (!tvmem[i])
2159 goto err_free_tv;
2160 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002161
Herbert Xu86068132014-12-04 16:43:29 +08002162 err = do_test(alg, type, mask, mode);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002163
Jarod Wilson4e033a62009-05-27 15:10:21 +10002164 if (err) {
2165 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2166 goto err_free_tv;
2167 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002168
Jarod Wilson4e033a62009-05-27 15:10:21 +10002169	/* We intentionally return -EAGAIN to prevent keeping the module loaded,
2170 * unless we're running in fips mode. It does all its work from
2171 * init() and doesn't offer any runtime functionality, but in
2172 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10002173 * => we don't need it in the memory, do we?
2174 * -- mludvig
2175 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10002176 if (!fips_enabled)
2177 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002178
Herbert Xuf139cfa2008-07-31 12:23:53 +08002179err_free_tv:
2180 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2181 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002182
2183 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002184}
2185
2186/*
2187 * If an init function is provided, an exit function must also be provided
2188 * to allow module unload.
2189 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002190static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002191
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002192module_init(tcrypt_mod_init);
2193module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002194
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002195module_param(alg, charp, 0);
2196module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08002197module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002198module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002199module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07002200MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2201 "(defaults to zero which uses CPU cycles instead)");
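/*
 * Typical invocations (illustrative only; mode numbers as defined in
 * do_test() above):
 *   modprobe tcrypt mode=100          - hmac(md5) self-test
 *   modprobe tcrypt mode=200 sec=1    - AES cipher speed tests, 1s per test
 *   modprobe tcrypt mode=303          - sha1 speed test using cycle counts
 * Since the module deliberately returns -EAGAIN (outside fips mode),
 * modprobe is expected to report an error even after the tests have run.
 */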
Linus Torvalds1da177e2005-04-16 15:20:36 -07002202
2203MODULE_LICENSE("GPL");
2204MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2205MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");