/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"
#include "internal.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", NULL
};

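/*
 * Run the blkcipher request back-to-back for roughly "secs" seconds and
 * report how many operations and bytes were processed.
 */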
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
			       struct scatterlist *sg, int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

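/*
 * Time a single encryption/decryption in CPU cycles: four warm-up calls,
 * then the average over eight timed calls, with local IRQs disabled.
 */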
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
			      struct scatterlist *sg, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

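/* Same two timing schemes as above, but driving an AEAD request. */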
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_aead_encrypt(req);
		else
			ret = crypto_aead_decrypt(req);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_aead_encrypt(req);
		else
			ret = crypto_aead_decrypt(req);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_aead_encrypt(req);
		else
			ret = crypto_aead_decrypt(req);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

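/*
 * Build a scatterlist over the pre-allocated xbuf pages covering buflen
 * bytes; buffers larger than XBUFSIZE pages are clamped to XBUFSIZE pages.
 */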
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np);
	np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k], xbuf[k], PAGE_SIZE);

	sg_set_buf(&sg[k], xbuf[k], rem);
}

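/*
 * AEAD speed test: for every (key size, block size) combination, pick a
 * key from the template (falling back to the 0xff-filled tvmem buffer),
 * set up source/destination scatterlists and associated data, then time
 * the requests either in jiffies (secs != 0) or in cycles.
 */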
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *asg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 8 * 3, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	asg = &sg[8];
	sgout = &asg[8];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);
			sg_init_one(&asg[0], assoc, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			/* don't let setauthsize() clobber a setkey() error */
			ret |= crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(&sg[0], xbuf,
				     *b_size + (enc ? authsize : 0));

			sg_init_aead(&sgout[0], xoutbuf,
				     *b_size + (enc ? authsize : 0));

			aead_request_set_crypt(req, sg, sgout, *b_size, iv);
			aead_request_set_assoc(req, asg, aad_size);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
	return;
}

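/*
 * Synchronous blkcipher speed test: walks the keysize and block-size
 * templates and times encryption or decryption over the tvmem pages.
 */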
static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, iv_len;
	const char *key;
	char iv[128];
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}
	desc.tfm = tfm;
	desc.flags = 0;

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_blkcipher, tfm), e);

	i = 0;
	do {

		b_size = block_sizes;
		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				printk("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			printk("test %u (%d bit key, %d byte blocks): ", i,
			       *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			ret = crypto_blkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				printk("setkey() failed flags=%x\n",
				       crypto_blkcipher_get_flags(tfm));
				goto out;
			}

			sg_init_table(sg, TVMEMSIZE);
			sg_set_buf(sg, tvmem[0] + *keysize,
				   PAGE_SIZE - *keysize);
			for (j = 1; j < TVMEMSIZE; j++) {
				sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
				memset(tvmem[j], 0xff, PAGE_SIZE);
			}

			iv_len = crypto_blkcipher_ivsize(tfm);
			if (iv_len) {
				memset(&iv, 0xff, iv_len);
				crypto_blkcipher_set_iv(tfm, iv, iv_len);
			}

			if (secs)
				ret = test_cipher_jiffies(&desc, enc, sg,
							  *b_size, secs);
			else
				ret = test_cipher_cycles(&desc, enc, sg,
							 *b_size);

			if (ret) {
				printk("%s() failed flags=%x\n", e, desc.flags);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	crypto_free_blkcipher(tfm);
}

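/*
 * Hash speed helpers: the *_digest variants hash the whole buffer in one
 * call, the plain variants split it into plen-sized updates.
 */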
static int test_hash_jiffies_digest(struct hash_desc *desc,
				    struct scatterlist *sg, int blen,
				    char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
			     int blen, int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_hash_jiffies_digest(desc, sg, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = crypto_hash_init(desc);
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = crypto_hash_final(desc, out);
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_hash_cycles_digest(struct hash_desc *desc,
				   struct scatterlist *sg, int blen, char *out)
{
	unsigned long cycles = 0;
	int i;
	int ret;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
			    int blen, int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount;
	int ret;

	if (plen == blen)
		return test_hash_cycles_digest(desc, sg, blen, out);

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

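/*
 * Synchronous hash speed test over the hash_speed table of
 * (block length, update length) entries.
 */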
static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_hash *tfm;
	struct hash_desc desc;
	static char output[1024];
	int i;
	int ret;

	tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of %s (%s)\n", algo,
	       get_driver_name(crypto_hash, tfm));

	desc.tfm = tfm;
	desc.flags = 0;

	if (crypto_hash_digestsize(tfm) > sizeof(output)) {
		printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
		       crypto_hash_digestsize(tfm), sizeof(output));
		goto out;
	}

	test_hash_sg_init(sg);
	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			printk(KERN_ERR
			       "template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);

		printk(KERN_INFO "test%3u "
		       "(%5u byte blocks,%5u bytes per update,%4u updates): ",
		       i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		if (secs)
			ret = test_hash_jiffies(&desc, sg, speed[i].blen,
						speed[i].plen, output, secs);
		else
			ret = test_hash_cycles(&desc, sg, speed[i].blen,
					       speed[i].plen, output);

		if (ret) {
			printk(KERN_ERR "hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	crypto_free_hash(tfm);
}

struct tcrypt_result {
	struct completion completion;
	int err;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

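/*
 * For async requests that return -EINPROGRESS or -EBUSY, wait on the
 * completion set up by tcrypt_complete() and pick up the real result.
 */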
static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

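/*
 * Asynchronous hash speed test: allocate an ahash transform and request,
 * wire up tcrypt_complete() as the callback, then run the same
 * block/update-length table as the synchronous test.
 */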
static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	struct scatterlist sg[TVMEMSIZE];
	struct tcrypt_result tresult;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	static char output[1024];
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
		pr_err("digestsize(%u) > outputbuffer(%zu)\n",
		       crypto_ahash_digestsize(tfm), sizeof(output));
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	init_completion(&tresult.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}

	return ret;
}

static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

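/*
 * Asynchronous ablkcipher speed test; the scatterlist spans several tvmem
 * pages when keysize + block size does not fit in a single page.
 */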
static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, k, iv_len;
	struct tcrypt_result tresult;
	const char *key;
	char iv[128];
	struct ablkcipher_request *req;
	struct crypto_ablkcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&tresult.completion);

	tfm = crypto_alloc_ablkcipher(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_ablkcipher, tfm), e);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &tresult);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_ablkcipher_clear_flags(tfm, ~0);

			ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_ablkcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_ablkcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	ablkcipher_request_free(req);
out:
	crypto_free_ablkcipher(tfm);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

static inline int tcrypt_test(const char *alg)
{
	int ret;

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

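/*
 * Dispatch on the selected test mode: mode 0 runs all correctness tests,
 * modes 1-199 run individual correctness tests via tcrypt_test(), and
 * modes from 200 up (such as cases 200 and 201 below) run speed tests.
 */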
Herbert Xu86068132014-12-04 16:43:29 +08001231static int do_test(const char *alg, u32 type, u32 mask, int m)
Herbert Xu01b32322008-07-31 15:41:55 +08001232{
1233 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001234 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001235
1236 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001237 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001238 if (alg) {
1239 if (!crypto_has_alg(alg, type,
1240 mask ?: CRYPTO_ALG_TYPE_MASK))
1241 ret = -ENOENT;
1242 break;
1243 }
1244
Herbert Xu01b32322008-07-31 15:41:55 +08001245 for (i = 1; i < 200; i++)
Herbert Xu86068132014-12-04 16:43:29 +08001246 ret += do_test(NULL, 0, 0, i);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001247 break;
1248
1249 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001250 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001251 break;
1252
1253 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001254 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001255 break;
1256
1257 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001258 ret += tcrypt_test("ecb(des)");
1259 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001260 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001261 break;
1262
1263 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001264 ret += tcrypt_test("ecb(des3_ede)");
1265 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001266 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001267 break;
1268
1269 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001270 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001271 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001272
Linus Torvalds1da177e2005-04-16 15:20:36 -07001273 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001274 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001275 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001276
Linus Torvalds1da177e2005-04-16 15:20:36 -07001277 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001278 ret += tcrypt_test("ecb(blowfish)");
1279 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001280 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001281 break;
1282
1283 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001284 ret += tcrypt_test("ecb(twofish)");
1285 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001286 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001287 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001288 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001289 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001290
Linus Torvalds1da177e2005-04-16 15:20:36 -07001291 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001292 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001293 ret += tcrypt_test("cbc(serpent)");
1294 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001295 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001296 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001297 break;
1298
1299 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001300 ret += tcrypt_test("ecb(aes)");
1301 ret += tcrypt_test("cbc(aes)");
1302 ret += tcrypt_test("lrw(aes)");
1303 ret += tcrypt_test("xts(aes)");
1304 ret += tcrypt_test("ctr(aes)");
1305 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001306 break;
1307
1308 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001309 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001310 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001311
Linus Torvalds1da177e2005-04-16 15:20:36 -07001312 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001313 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001314 break;
1315
1316 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001317 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001318 break;
1319
1320 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001321 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001322 ret += tcrypt_test("cbc(cast5)");
1323 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001324 break;
1325
1326 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001327 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001328 ret += tcrypt_test("cbc(cast6)");
1329 ret += tcrypt_test("ctr(cast6)");
1330 ret += tcrypt_test("lrw(cast6)");
1331 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001332 break;
1333
1334 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001335 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001336 break;
1337
1338 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001339 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001340 break;
1341
1342 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001343 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001344 break;
1345
1346 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001347 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001348 break;
1349
1350 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001351 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001352 break;
1353
1354 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001355 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001356 break;
1357
1358 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001359 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001360 break;
1361
1362 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001363 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001364 break;
1365
1366 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001367 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001368 break;
1369
1370 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001371 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001372 break;
1373
1374 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001375 ret += tcrypt_test("ecb(anubis)");
1376 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001377 break;
1378
1379 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001380 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001381 break;
1382
1383 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001384 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001385 break;
1386
1387 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001388 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001389 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001390
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001391 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001392 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001393 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001394
David Howells90831632006-12-16 12:13:14 +11001395 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001396 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001397 break;
1398
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001399 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001400 ret += tcrypt_test("ecb(camellia)");
1401 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001402 ret += tcrypt_test("ctr(camellia)");
1403 ret += tcrypt_test("lrw(camellia)");
1404 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001405 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001406
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001407 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001408 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001409 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001410
Tan Swee Heng2407d602007-11-23 19:45:00 +08001411 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001412 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001413 break;
1414
Herbert Xu8df213d2007-12-02 14:55:47 +11001415 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001416 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001417 break;
1418
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001419 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001420 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001421 break;
1422
Joy Latten93cc74e2007-12-12 20:24:22 +08001423 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001424 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001425 break;
1426
Kevin Coffman76cb9522008-03-24 21:26:16 +08001427 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001428 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001429 break;
1430
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001431 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001432 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001433 break;
1434
1435 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001436 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001437 break;
1438
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001439 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001440 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001441 break;
1442
1443 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001444 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001445 break;
1446
1447 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001448 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001449 break;
1450
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001451 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001452 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001453 break;
1454
Jarod Wilson5d667322009-05-04 19:23:40 +08001455 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001456 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001457 break;
1458
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001459 case 46:
1460 ret += tcrypt_test("ghash");
1461 break;
1462
Herbert Xu684115212013-09-07 12:56:26 +10001463 case 47:
1464 ret += tcrypt_test("crct10dif");
1465 break;
1466
Linus Torvalds1da177e2005-04-16 15:20:36 -07001467 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001468 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001469 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001470
Linus Torvalds1da177e2005-04-16 15:20:36 -07001471 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001472 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001473 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001474
Linus Torvalds1da177e2005-04-16 15:20:36 -07001475 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001476 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001477 break;
1478
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001479 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001480 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001481 break;
1482
1483 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001484 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001485 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001486
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001487 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001488 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001489 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001490
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001491 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001492 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001493 break;
1494
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001495 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001496 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001497 break;
1498
1499 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001500 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001501 break;
1502
Shane Wangf1939f72009-09-02 20:05:22 +10001503 case 109:
1504 ret += tcrypt_test("vmac(aes)");
1505 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001506
Sonic Zhanga482b082012-05-25 17:54:13 +08001507 case 110:
1508 ret += tcrypt_test("hmac(crc32)");
1509 break;
Shane Wangf1939f72009-09-02 20:05:22 +10001510
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001511 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001512 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001513 break;
1514
Adrian Hoban69435b92010-11-04 15:02:04 -04001515 case 151:
1516 ret += tcrypt_test("rfc4106(gcm(aes))");
1517 break;
1518
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001519 case 152:
1520 ret += tcrypt_test("rfc4543(gcm(aes))");
1521 break;
1522
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001523 case 153:
1524 ret += tcrypt_test("cmac(aes)");
1525 break;
1526
1527 case 154:
1528 ret += tcrypt_test("cmac(des3_ede)");
1529 break;
1530
Horia Geantabbf9c892013-11-28 15:11:16 +02001531 case 155:
1532 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1533 break;
1534
Horia Geantabca4feb2014-03-14 17:46:51 +02001535 case 156:
1536 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1537 break;
1538
1539 case 157:
1540 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1541 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05301542 case 181:
1543 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
1544 break;
1545 case 182:
1546 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
1547 break;
1548 case 183:
1549 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
1550 break;
1551 case 184:
1552 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
1553 break;
1554 case 185:
1555 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
1556 break;
1557 case 186:
1558 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
1559 break;
1560 case 187:
1561 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
1562 break;
1563 case 188:
1564 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
1565 break;
1566 case 189:
1567 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
1568 break;
1569 case 190:
1570 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
1571 break;
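	/* Modes 200 and up run speed tests instead of correctness tests;
	 * the 'sec' module parameter selects seconds per measurement, and
	 * zero means CPU cycles are counted instead. (orientation comment
	 * added; behaviour of 'sec' is described by MODULE_PARM_DESC below)
	 */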
Harald Welteebfd9bc2005-06-22 13:27:23 -07001572 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10001573 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001574 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001575 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001576 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001577 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001578 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001579 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001580 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11001581 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001582 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11001583 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001584 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08001585 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001586 speed_template_32_48_64);
Rik Snelf19f5112007-09-19 20:23:13 +08001587 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001588 speed_template_32_48_64);
Jan Glauber9996e342011-04-26 16:34:01 +10001589 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1590 speed_template_16_24_32);
1591 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1592 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001593 break;
1594
1595 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10001596 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001597 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001598 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001599 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001600 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001601 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001602 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001603 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001604 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001605 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001606 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001607 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03001608 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
1609 des3_speed_template, DES3_SPEED_VECTORS,
1610 speed_template_24);
1611 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
1612 des3_speed_template, DES3_SPEED_VECTORS,
1613 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001614 break;
1615
1616 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10001617 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001618 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001619 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001620 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001621 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001622 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001623 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001624 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03001625 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1626 speed_template_16_24_32);
1627 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1628 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001629 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1630 speed_template_32_40_48);
1631 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1632 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001633 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1634 speed_template_32_48_64);
1635 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1636 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001637 break;
1638
1639 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10001640 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001641 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001642 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001643 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001644 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001645 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001646 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001647 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03001648 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1649 speed_template_8_32);
1650 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1651 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001652 break;
1653
1654 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10001655 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001656 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001657 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001658 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001659 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001660 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001661 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001662 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001663 break;
1664
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001665 case 205:
1666 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001667 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001668 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001669 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001670 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001671 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001672 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001673 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02001674 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1675 speed_template_16_24_32);
1676 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1677 speed_template_16_24_32);
1678 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1679 speed_template_32_40_48);
1680 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1681 speed_template_32_40_48);
1682 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1683 speed_template_32_48_64);
1684 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1685 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001686 break;
1687
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001688 case 206:
1689 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001690 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001691 break;
1692
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001693 case 207:
1694 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1695 speed_template_16_32);
1696 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1697 speed_template_16_32);
1698 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1699 speed_template_16_32);
1700 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1701 speed_template_16_32);
1702 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1703 speed_template_16_32);
1704 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1705 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001706 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1707 speed_template_32_48);
1708 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1709 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001710 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1711 speed_template_32_64);
1712 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1713 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001714 break;
1715
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001716 case 208:
1717 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1718 speed_template_8);
1719 break;
1720
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001721 case 209:
1722 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1723 speed_template_8_16);
1724 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1725 speed_template_8_16);
1726 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1727 speed_template_8_16);
1728 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1729 speed_template_8_16);
1730 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1731 speed_template_8_16);
1732 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1733 speed_template_8_16);
1734 break;
1735
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001736 case 210:
1737 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1738 speed_template_16_32);
1739 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1740 speed_template_16_32);
1741 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1742 speed_template_16_32);
1743 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1744 speed_template_16_32);
1745 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1746 speed_template_16_32);
1747 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1748 speed_template_16_32);
1749 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1750 speed_template_32_48);
1751 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1752 speed_template_32_48);
1753 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1754 speed_template_32_64);
1755 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1756 speed_template_32_64);
1757 break;
1758
Tim Chen53f52d72013-12-11 14:28:47 -08001759 case 211:
1760 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
1761 NULL, 0, 16, 8, aead_speed_template_20);
1762 break;
1763
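	/* Modes 300-320: hash speed tests. If the alg= parameter is set,
	 * mode 300 benchmarks that algorithm directly; otherwise the cases
	 * fall through in sequence, each stopping once a specific mode in
	 * 301..399 has been handled. (orientation comment added)
	 */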
Michal Ludvige8057922006-05-30 22:04:19 +10001764 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08001765 if (alg) {
1766 test_hash_speed(alg, sec, generic_hash_speed_template);
1767 break;
1768 }
1769
Michal Ludvige8057922006-05-30 22:04:19 +10001770 /* fall through */
1771
1772 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10001773 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001774 if (mode > 300 && mode < 400) break;
1775
1776 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10001777 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001778 if (mode > 300 && mode < 400) break;
1779
1780 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10001781 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001782 if (mode > 300 && mode < 400) break;
1783
1784 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10001785 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001786 if (mode > 300 && mode < 400) break;
1787
1788 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10001789 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001790 if (mode > 300 && mode < 400) break;
1791
1792 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10001793 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001794 if (mode > 300 && mode < 400) break;
1795
1796 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10001797 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001798 if (mode > 300 && mode < 400) break;
1799
1800 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10001801 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001802 if (mode > 300 && mode < 400) break;
1803
1804 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10001805 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001806 if (mode > 300 && mode < 400) break;
1807
1808 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10001809 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001810 if (mode > 300 && mode < 400) break;
1811
1812 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10001813 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001814 if (mode > 300 && mode < 400) break;
1815
1816 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10001817 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001818 if (mode > 300 && mode < 400) break;
1819
Jonathan Lynchcd12fb902007-11-10 20:08:25 +08001820 case 313:
1821 test_hash_speed("sha224", sec, generic_hash_speed_template);
1822 if (mode > 300 && mode < 400) break;
1823
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001824 case 314:
1825 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1826 if (mode > 300 && mode < 400) break;
1827
1828 case 315:
1829 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1830 if (mode > 300 && mode < 400) break;
1831
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001832 case 316:
1833 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1834 if (mode > 300 && mode < 400) break;
1835
1836 case 317:
1837 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1838 if (mode > 300 && mode < 400) break;
1839
Huang Ying18bcc912010-03-10 18:30:32 +08001840 case 318:
1841 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1842 if (mode > 300 && mode < 400) break;
1843
Tim Chene3899e42012-09-27 15:44:24 -07001844 case 319:
1845 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1846 if (mode > 300 && mode < 400) break;
1847
Herbert Xu684115212013-09-07 12:56:26 +10001848 case 320:
1849 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1850 if (mode > 300 && mode < 400) break;
1851
Michal Ludvige8057922006-05-30 22:04:19 +10001852 case 399:
1853 break;
1854
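	/* Modes 400-417 repeat the hash speed tests through the ahash
	 * (asynchronous hash) interface, again honouring alg= for mode 400.
	 * (orientation comment added)
	 */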
David S. Millerbeb63da2010-05-19 14:11:21 +10001855 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08001856 if (alg) {
1857 test_ahash_speed(alg, sec, generic_hash_speed_template);
1858 break;
1859 }
1860
David S. Millerbeb63da2010-05-19 14:11:21 +10001861 /* fall through */
1862
1863 case 401:
1864 test_ahash_speed("md4", sec, generic_hash_speed_template);
1865 if (mode > 400 && mode < 500) break;
1866
1867 case 402:
1868 test_ahash_speed("md5", sec, generic_hash_speed_template);
1869 if (mode > 400 && mode < 500) break;
1870
1871 case 403:
1872 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1873 if (mode > 400 && mode < 500) break;
1874
1875 case 404:
1876 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1877 if (mode > 400 && mode < 500) break;
1878
1879 case 405:
1880 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1881 if (mode > 400 && mode < 500) break;
1882
1883 case 406:
1884 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1885 if (mode > 400 && mode < 500) break;
1886
1887 case 407:
1888 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1889 if (mode > 400 && mode < 500) break;
1890
1891 case 408:
1892 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1893 if (mode > 400 && mode < 500) break;
1894
1895 case 409:
1896 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1897 if (mode > 400 && mode < 500) break;
1898
1899 case 410:
1900 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1901 if (mode > 400 && mode < 500) break;
1902
1903 case 411:
1904 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1905 if (mode > 400 && mode < 500) break;
1906
1907 case 412:
1908 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1909 if (mode > 400 && mode < 500) break;
1910
1911 case 413:
1912 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1913 if (mode > 400 && mode < 500) break;
1914
1915 case 414:
1916 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1917 if (mode > 400 && mode < 500) break;
1918
1919 case 415:
1920 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1921 if (mode > 400 && mode < 500) break;
1922
1923 case 416:
1924 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1925 if (mode > 400 && mode < 500) break;
1926
1927 case 417:
1928 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1929 if (mode > 400 && mode < 500) break;
1930
1931 case 499:
1932 break;
1933
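	/* Modes 500-509: cipher speed tests through the asynchronous
	 * (acipher) interface. (orientation comment added)
	 */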
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001934 case 500:
1935 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1936 speed_template_16_24_32);
1937 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1938 speed_template_16_24_32);
1939 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1940 speed_template_16_24_32);
1941 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1942 speed_template_16_24_32);
1943 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1944 speed_template_32_40_48);
1945 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1946 speed_template_32_40_48);
1947 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1948 speed_template_32_48_64);
1949 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1950 speed_template_32_48_64);
1951 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1952 speed_template_16_24_32);
1953 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1954 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02001955 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1956 speed_template_16_24_32);
1957 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1958 speed_template_16_24_32);
1959 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1960 speed_template_16_24_32);
1961 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1962 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02001963 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1964 speed_template_20_28_36);
1965 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1966 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001967 break;
1968
1969 case 501:
1970 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1971 des3_speed_template, DES3_SPEED_VECTORS,
1972 speed_template_24);
1973 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
1974 des3_speed_template, DES3_SPEED_VECTORS,
1975 speed_template_24);
1976 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1977 des3_speed_template, DES3_SPEED_VECTORS,
1978 speed_template_24);
1979 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
1980 des3_speed_template, DES3_SPEED_VECTORS,
1981 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02001982 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
1983 des3_speed_template, DES3_SPEED_VECTORS,
1984 speed_template_24);
1985 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
1986 des3_speed_template, DES3_SPEED_VECTORS,
1987 speed_template_24);
1988 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
1989 des3_speed_template, DES3_SPEED_VECTORS,
1990 speed_template_24);
1991 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
1992 des3_speed_template, DES3_SPEED_VECTORS,
1993 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001994 break;
1995
1996 case 502:
1997 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1998 speed_template_8);
1999 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2000 speed_template_8);
2001 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2002 speed_template_8);
2003 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2004 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002005 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2006 speed_template_8);
2007 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2008 speed_template_8);
2009 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2010 speed_template_8);
2011 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2012 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002013 break;
2014
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002015 case 503:
2016 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2017 speed_template_16_32);
2018 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2019 speed_template_16_32);
2020 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2021 speed_template_16_32);
2022 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2023 speed_template_16_32);
2024 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2025 speed_template_16_32);
2026 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2027 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002028 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2029 speed_template_32_48);
2030 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2031 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002032 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2033 speed_template_32_64);
2034 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2035 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002036 break;
2037
Johannes Goetzfried107778b2012-05-28 15:54:24 +02002038 case 504:
2039 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2040 speed_template_16_24_32);
2041 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2042 speed_template_16_24_32);
2043 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2044 speed_template_16_24_32);
2045 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2046 speed_template_16_24_32);
2047 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2048 speed_template_16_24_32);
2049 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2050 speed_template_16_24_32);
2051 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2052 speed_template_32_40_48);
2053 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2054 speed_template_32_40_48);
2055 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2056 speed_template_32_48_64);
2057 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2058 speed_template_32_48_64);
2059 break;
2060
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002061 case 505:
2062 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2063 speed_template_8);
2064 break;
2065
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002066 case 506:
2067 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2068 speed_template_8_16);
2069 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2070 speed_template_8_16);
2071 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2072 speed_template_8_16);
2073 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2074 speed_template_8_16);
2075 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2076 speed_template_8_16);
2077 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2078 speed_template_8_16);
2079 break;
2080
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002081 case 507:
2082 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2083 speed_template_16_32);
2084 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2085 speed_template_16_32);
2086 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2087 speed_template_16_32);
2088 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2089 speed_template_16_32);
2090 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2091 speed_template_16_32);
2092 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2093 speed_template_16_32);
2094 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2095 speed_template_32_48);
2096 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2097 speed_template_32_48);
2098 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2099 speed_template_32_64);
2100 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2101 speed_template_32_64);
2102 break;
2103
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002104 case 508:
2105 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2106 speed_template_16_32);
2107 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2108 speed_template_16_32);
2109 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2110 speed_template_16_32);
2111 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2112 speed_template_16_32);
2113 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2114 speed_template_16_32);
2115 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2116 speed_template_16_32);
2117 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2118 speed_template_32_48);
2119 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2120 speed_template_32_48);
2121 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2122 speed_template_32_64);
2123 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2124 speed_template_32_64);
2125 break;
2126
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002127 case 509:
2128 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2129 speed_template_8_32);
2130 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2131 speed_template_8_32);
2132 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2133 speed_template_8_32);
2134 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2135 speed_template_8_32);
2136 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2137 speed_template_8_32);
2138 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2139 speed_template_8_32);
2140 break;
2141
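	/* Mode 1000 only reports which of the known algorithm names are
	 * currently available; it performs no tests. (orientation comment
	 * added)
	 */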
Linus Torvalds1da177e2005-04-16 15:20:36 -07002142 case 1000:
2143 test_available();
2144 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002145 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10002146
2147 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002148}
2149
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002150static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07002151{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002152 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08002153 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002154
Herbert Xuf139cfa2008-07-31 12:23:53 +08002155 for (i = 0; i < TVMEMSIZE; i++) {
2156 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2157 if (!tvmem[i])
2158 goto err_free_tv;
2159 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002160
Herbert Xu86068132014-12-04 16:43:29 +08002161 err = do_test(alg, type, mask, mode);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002162
Jarod Wilson4e033a62009-05-27 15:10:21 +10002163 if (err) {
2164 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2165 goto err_free_tv;
2166 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002167
Jarod Wilson4e033a62009-05-27 15:10:21 +10002168	/* We intentionally return -EAGAIN to prevent the module from staying
2169	 * loaded, unless we're running in fips mode. It does all its work from
2170	 * init() and doesn't offer any runtime functionality, but in
2171	 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10002172	 * => we don't need it in memory, do we?
2173	 * -- mludvig
2174	 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10002175 if (!fips_enabled)
2176 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002177
Herbert Xuf139cfa2008-07-31 12:23:53 +08002178err_free_tv:
2179 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2180 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002181
2182 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002183}
2184
2185/*
2186 * If an init function is provided, an exit function must also be provided
2187 * to allow module unload.
2188 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002189static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002190
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002191module_init(tcrypt_mod_init);
2192module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002193
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002194module_param(alg, charp, 0);
2195module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08002196module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002197module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002198module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07002199MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2200 "(defaults to zero which uses CPU cycles instead)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07002201
2202MODULE_LICENSE("GPL");
2203MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2204MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");
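
/*
 * Typical usage (a sketch; tcrypt does all of its work at module load time,
 * based on the mode/alg/sec parameters declared above):
 *
 *   modprobe tcrypt mode=200 sec=1        # AES cipher speed tests, 1 second each
 *   modprobe tcrypt mode=300 alg=sha256   # hash speed test for the named algorithm
 *   modprobe tcrypt mode=45               # rfc4309(ccm(aes)) correctness test
 *
 * Outside of fips mode the module intentionally returns -EAGAIN from its init
 * function, so even a fully successful run ends with a modprobe/insmod error.
 */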