blob: 5183ec5a451774607691a528c1269e364ee24053 [file] [log] [blame]
Herbert Xuda7f0332008-07-31 17:08:25 +08001/*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
15
16#include <crypto/hash.h>
17#include <linux/err.h>
18#include <linux/module.h>
19#include <linux/scatterlist.h>
20#include <linux/slab.h>
21#include <linux/string.h>
Jarod Wilson7647d6c2009-05-04 19:44:50 +080022#include <crypto/rng.h>
Herbert Xuda7f0332008-07-31 17:08:25 +080023
24#include "internal.h"
25#include "testmgr.h"
26
27/*
28 * Need slab memory for testing (size in number of pages).
29 */
30#define XBUFSIZE 8
31
32/*
33 * Indexes into the xbuf to simulate cross-page access.
34 */
35#define IDX1 32
36#define IDX2 32400
37#define IDX3 1
38#define IDX4 8193
39#define IDX5 22222
40#define IDX6 17101
41#define IDX7 27333
42#define IDX8 3000
43
44/*
45* Used by test_cipher()
46*/
47#define ENCRYPT 1
48#define DECRYPT 0
49
/*
 * Completion carrier for asynchronous crypto requests; tcrypt_complete()
 * stores the final status in err and signals completion.
 */
struct tcrypt_result {
	struct completion completion;
	int err;
};

/* AEAD test vectors, split by direction (encrypt / decrypt). */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Block/stream cipher test vectors, split by direction. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Synchronous compression test vectors (compress / decompress). */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Partial (streaming) compression test vectors. */
struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Digest test vectors; hashes have no direction so a single list suffices. */
struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

/* Deterministic RNG (CPRNG) test vectors. */
struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

/*
 * One entry in the algorithm test table: the algorithm name, the driver
 * function that knows how to exercise it, and the matching vector suite.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
	} suite;
};
107
/*
 * Byte offsets used to scatter chunked test data across the scratch pages:
 * IDX[k] >> PAGE_SHIFT picks the page, offset_in_page(IDX[k]) the offset,
 * so scatterlist entries land near/across page boundaries on purpose.
 */
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

/* Scratch page pointers (allocated elsewhere, not in this chunk); axbuf
 * holds AEAD associated data, xbuf the plaintext/ciphertext proper. */
static char *xbuf[XBUFSIZE];
static char *axbuf[XBUFSIZE];
112
113static void hexdump(unsigned char *buf, unsigned int len)
114{
115 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
116 16, 1,
117 buf, len, false);
118}
119
120static void tcrypt_complete(struct crypto_async_request *req, int err)
121{
122 struct tcrypt_result *res = req->data;
123
124 if (err == -EINPROGRESS)
125 return;
126
127 res->err = err;
128 complete(&res->completion);
129}
130
/*
 * Exercise an async hash transform against template[0..tcount).
 *
 * Pass 1 digests each vector's plaintext through a single scatterlist
 * entry.  Pass 2 re-runs only the vectors with a non-zero .np, splitting
 * the plaintext into .np chunks of .tap[] bytes placed at the cross-page
 * offsets in IDX[].  Returns 0 on success or a negative errno.
 */
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];	/* large enough for the biggest digest tested */
	struct ahash_request *req;
	struct tcrypt_result tresult;
	int ret;
	void *hash_buff;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		ret = -ENOMEM;
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	/* Pass 1: linear (single-entry scatterlist) digests. */
	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		hash_buff = xbuf[0];

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", i + 1, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			/* Async path: wait for tcrypt_complete(), then pick
			 * up the real status from tresult.err. */
			ret = wait_for_completion_interruptible(
				&tresult.completion);
			if (!ret && !(ret = tresult.err)) {
				INIT_COMPLETION(tresult.completion);
				break;
			}
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed on test %d "
			       "for %s: ret=%d\n", i + 1, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: chunked digests for vectors that define a tap layout. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;
			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				/* Copy chunk k to its cross-page slot and
				 * hang it off scatterlist entry k. */
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);

				if (ret) {
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	return ret;
}
277
/*
 * Exercise an AEAD transform against template[0..tcount).
 *
 * Loop 1 handles linear vectors (np == 0): single scatterlist entry for
 * the text and one for the associated data.  Loop 2 handles chunked
 * vectors (np != 0): the text is scattered across np entries of tap[]
 * bytes at the IDX[] cross-page offsets (assoc data likewise via anp/atap),
 * and on encryption the last entry is widened to make room for the
 * authentication tag.  Vectors with .novrfy expect decryption to fail
 * with -EBADMSG.  Returns 0 on success or a negative errno.
 */
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = 0;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	const char *e;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);
		ret = -ENOMEM;
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			/* Triggers when the setkey outcome disagrees with
			 * the vector's .fail expectation.
			 * NOTE(review): when setkey returned 0 but .fail was
			 * set, ret is still 0 here, so the function reports
			 * an error yet returns success — verify intent. */
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			/* Tag length is the ciphertext/plaintext delta. */
			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			/* Encryption writes the tag after the text, so the
			 * buffer must cover ilen + authsize bytes. */
			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed "
					       "on test %d for %s: ret was 0, "
					       "expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* NOTE(review): unannotated fallthrough into
				 * the -EBADMSG case below — a non-EBADMSG
				 * async error on a .novrfy vector is treated
				 * as the expected failure; confirm intent. */
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			/* Same setkey-vs-.fail check as in the linear loop;
			 * same NOTE(review) caveat about ret == 0 applies. */
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				/* Each chunk must fit within its page. */
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				/* Zero the byte just past the chunk (plus
				 * tag space on the last chunk when
				 * encrypting) so overruns are detectable
				 * in the corruption scan below. */
				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);
				goto out;
			}

			if (enc) {
				/* Widen the final entry so the generated
				 * tag has somewhere to go. */
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen,
					       iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed "
					       "on chunk test %d for %s: ret "
					       "was 0, expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* NOTE(review): unannotated fallthrough into
				 * -EBADMSG, as in the linear loop above. */
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			/* Verify each chunk against the expected result and
			 * scan past it for buffer overruns. */
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					/* Decrypt: bytes after the plaintext
					 * must still match the input tag. */
					if (memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					/* Count stray non-zero bytes up to
					 * the end of the page. */
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	return ret;
}
586
Herbert Xu1aa4ecd2008-08-17 17:01:56 +1000587static int test_cipher(struct crypto_cipher *tfm, int enc,
Herbert Xuda7f0332008-07-31 17:08:25 +0800588 struct cipher_testvec *template, unsigned int tcount)
589{
Herbert Xu1aa4ecd2008-08-17 17:01:56 +1000590 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
591 unsigned int i, j, k;
592 int ret;
593 char *q;
594 const char *e;
595 void *data;
596
597 if (enc == ENCRYPT)
598 e = "encryption";
599 else
600 e = "decryption";
601
602 j = 0;
603 for (i = 0; i < tcount; i++) {
604 if (template[i].np)
605 continue;
606
607 j++;
608
609 data = xbuf[0];
610 memcpy(data, template[i].input, template[i].ilen);
611
612 crypto_cipher_clear_flags(tfm, ~0);
613 if (template[i].wk)
614 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
615
616 ret = crypto_cipher_setkey(tfm, template[i].key,
617 template[i].klen);
618 if (!ret == template[i].fail) {
619 printk(KERN_ERR "alg: cipher: setkey failed "
620 "on test %d for %s: flags=%x\n", j,
621 algo, crypto_cipher_get_flags(tfm));
622 goto out;
623 } else if (ret)
624 continue;
625
626 for (k = 0; k < template[i].ilen;
627 k += crypto_cipher_blocksize(tfm)) {
628 if (enc)
629 crypto_cipher_encrypt_one(tfm, data + k,
630 data + k);
631 else
632 crypto_cipher_decrypt_one(tfm, data + k,
633 data + k);
634 }
635
636 q = data;
637 if (memcmp(q, template[i].result, template[i].rlen)) {
638 printk(KERN_ERR "alg: cipher: Test %d failed "
639 "on %s for %s\n", j, e, algo);
640 hexdump(q, template[i].rlen);
641 ret = -EINVAL;
642 goto out;
643 }
644 }
645
646 ret = 0;
647
648out:
649 return ret;
650}
651
/*
 * Exercise an async block cipher (ablkcipher) against template[0..tcount).
 *
 * Loop 1 runs the linear vectors (np == 0) through a single scatterlist
 * entry; loop 2 runs the chunked vectors (np != 0), scattering the input
 * across np entries of tap[] bytes at the IDX[] cross-page offsets and
 * scanning past each chunk for buffer overruns afterwards.  Returns 0 on
 * success or a negative errno.
 */
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		ret = -ENOMEM;
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	/* Pass 1: linear vectors. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			j++;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			/* Triggers when setkey's outcome disagrees with the
			 * vector's .fail expectation.
			 * NOTE(review): if setkey returned 0 but .fail was
			 * set, ret is still 0 at the goto, so an error is
			 * logged yet success is returned — verify intent. */
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				/* Async path: wait for the callback, then
				 * read the real status from result.err. */
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	/* Pass 2: chunked vectors. */
	j = 0;
	for (i = 0; i < tcount; i++) {

		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			j++;

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			/* Same setkey-vs-.fail check and caveat as above. */
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				/* Each chunk must fit within its page. */
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				/* Zero the byte past the chunk so overruns
				 * show up in the scan below. */
				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			/* Verify each chunk and scan for trailing-byte
			 * corruption up to the end of the page. */
			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	return ret;
}
865
/*
 * Exercise a synchronous compression transform: compress each vector in
 * ctemplate[0..ctcount) and decompress each vector in dtemplate[0..dtcount),
 * checking both the output length and the output bytes.  Returns 0 on
 * success or a negative errno.
 */
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen;
		/* dlen is in/out: capacity on entry, bytes produced on
		 * return from crypto_comp_compress(). */
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
945
Geert Uytterhoeven8064efb2009-03-04 15:08:03 +0800946static int test_pcomp(struct crypto_pcomp *tfm,
947 struct pcomp_testvec *ctemplate,
948 struct pcomp_testvec *dtemplate, int ctcount,
949 int dtcount)
950{
951 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
952 unsigned int i;
953 char result[COMP_BUF_SIZE];
954 int error;
955
956 for (i = 0; i < ctcount; i++) {
957 struct comp_request req;
958
959 error = crypto_compress_setup(tfm, ctemplate[i].params,
960 ctemplate[i].paramsize);
961 if (error) {
962 pr_err("alg: pcomp: compression setup failed on test "
963 "%d for %s: error=%d\n", i + 1, algo, error);
964 return error;
965 }
966
967 error = crypto_compress_init(tfm);
968 if (error) {
969 pr_err("alg: pcomp: compression init failed on test "
970 "%d for %s: error=%d\n", i + 1, algo, error);
971 return error;
972 }
973
974 memset(result, 0, sizeof(result));
975
976 req.next_in = ctemplate[i].input;
977 req.avail_in = ctemplate[i].inlen / 2;
978 req.next_out = result;
979 req.avail_out = ctemplate[i].outlen / 2;
980
981 error = crypto_compress_update(tfm, &req);
982 if (error && (error != -EAGAIN || req.avail_in)) {
983 pr_err("alg: pcomp: compression update failed on test "
984 "%d for %s: error=%d\n", i + 1, algo, error);
985 return error;
986 }
987
988 /* Add remaining input data */
989 req.avail_in += (ctemplate[i].inlen + 1) / 2;
990
991 error = crypto_compress_update(tfm, &req);
992 if (error && (error != -EAGAIN || req.avail_in)) {
993 pr_err("alg: pcomp: compression update failed on test "
994 "%d for %s: error=%d\n", i + 1, algo, error);
995 return error;
996 }
997
998 /* Provide remaining output space */
999 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1000
1001 error = crypto_compress_final(tfm, &req);
1002 if (error) {
1003 pr_err("alg: pcomp: compression final failed on test "
1004 "%d for %s: error=%d\n", i + 1, algo, error);
1005 return error;
1006 }
1007
1008 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1009 pr_err("alg: comp: Compression test %d failed for %s: "
1010 "output len = %d (expected %d)\n", i + 1, algo,
1011 COMP_BUF_SIZE - req.avail_out,
1012 ctemplate[i].outlen);
1013 return -EINVAL;
1014 }
1015
1016 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1017 pr_err("alg: pcomp: Compression test %d failed for "
1018 "%s\n", i + 1, algo);
1019 hexdump(result, ctemplate[i].outlen);
1020 return -EINVAL;
1021 }
1022 }
1023
1024 for (i = 0; i < dtcount; i++) {
1025 struct comp_request req;
1026
1027 error = crypto_decompress_setup(tfm, dtemplate[i].params,
1028 dtemplate[i].paramsize);
1029 if (error) {
1030 pr_err("alg: pcomp: decompression setup failed on "
1031 "test %d for %s: error=%d\n", i + 1, algo,
1032 error);
1033 return error;
1034 }
1035
1036 error = crypto_decompress_init(tfm);
1037 if (error) {
1038 pr_err("alg: pcomp: decompression init failed on test "
1039 "%d for %s: error=%d\n", i + 1, algo, error);
1040 return error;
1041 }
1042
1043 memset(result, 0, sizeof(result));
1044
1045 req.next_in = dtemplate[i].input;
1046 req.avail_in = dtemplate[i].inlen / 2;
1047 req.next_out = result;
1048 req.avail_out = dtemplate[i].outlen / 2;
1049
1050 error = crypto_decompress_update(tfm, &req);
1051 if (error && (error != -EAGAIN || req.avail_in)) {
1052 pr_err("alg: pcomp: decompression update failed on "
1053 "test %d for %s: error=%d\n", i + 1, algo,
1054 error);
1055 return error;
1056 }
1057
1058 /* Add remaining input data */
1059 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1060
1061 error = crypto_decompress_update(tfm, &req);
1062 if (error && (error != -EAGAIN || req.avail_in)) {
1063 pr_err("alg: pcomp: decompression update failed on "
1064 "test %d for %s: error=%d\n", i + 1, algo,
1065 error);
1066 return error;
1067 }
1068
1069 /* Provide remaining output space */
1070 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1071
1072 error = crypto_decompress_final(tfm, &req);
1073 if (error && (error != -EAGAIN || req.avail_in)) {
1074 pr_err("alg: pcomp: decompression final failed on "
1075 "test %d for %s: error=%d\n", i + 1, algo,
1076 error);
1077 return error;
1078 }
1079
1080 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1081 pr_err("alg: comp: Decompression test %d failed for "
1082 "%s: output len = %d (expected %d)\n", i + 1,
1083 algo, COMP_BUF_SIZE - req.avail_out,
1084 dtemplate[i].outlen);
1085 return -EINVAL;
1086 }
1087
1088 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1089 pr_err("alg: pcomp: Decompression test %d failed for "
1090 "%s\n", i + 1, algo);
1091 hexdump(result, dtemplate[i].outlen);
1092 return -EINVAL;
1093 }
1094 }
1095
1096 return 0;
1097}
1098
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001099
1100static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1101 unsigned int tcount)
1102{
1103 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1104 int err, i, j, seedsize;
1105 u8 *seed;
1106 char result[32];
1107
1108 seedsize = crypto_rng_seedsize(tfm);
1109
1110 seed = kmalloc(seedsize, GFP_KERNEL);
1111 if (!seed) {
1112 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1113 "for %s\n", algo);
1114 return -ENOMEM;
1115 }
1116
1117 for (i = 0; i < tcount; i++) {
1118 memset(result, 0, 32);
1119
1120 memcpy(seed, template[i].v, template[i].vlen);
1121 memcpy(seed + template[i].vlen, template[i].key,
1122 template[i].klen);
1123 memcpy(seed + template[i].vlen + template[i].klen,
1124 template[i].dt, template[i].dtlen);
1125
1126 err = crypto_rng_reset(tfm, seed, seedsize);
1127 if (err) {
1128 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1129 "for %s\n", algo);
1130 goto out;
1131 }
1132
1133 for (j = 0; j < template[i].loops; j++) {
1134 err = crypto_rng_get_bytes(tfm, result,
1135 template[i].rlen);
1136 if (err != template[i].rlen) {
1137 printk(KERN_ERR "alg: cprng: Failed to obtain "
1138 "the correct amount of random data for "
1139 "%s (requested %d, got %d)\n", algo,
1140 template[i].rlen, err);
1141 goto out;
1142 }
1143 }
1144
1145 err = memcmp(result, template[i].result,
1146 template[i].rlen);
1147 if (err) {
1148 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1149 i, algo);
1150 hexdump(result, template[i].rlen);
1151 err = -EINVAL;
1152 goto out;
1153 }
1154 }
1155
1156out:
1157 kfree(seed);
1158 return err;
1159}
1160
Herbert Xuda7f0332008-07-31 17:08:25 +08001161static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1162 u32 type, u32 mask)
1163{
1164 struct crypto_aead *tfm;
1165 int err = 0;
1166
1167 tfm = crypto_alloc_aead(driver, type, mask);
1168 if (IS_ERR(tfm)) {
1169 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1170 "%ld\n", driver, PTR_ERR(tfm));
1171 return PTR_ERR(tfm);
1172 }
1173
1174 if (desc->suite.aead.enc.vecs) {
1175 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1176 desc->suite.aead.enc.count);
1177 if (err)
1178 goto out;
1179 }
1180
1181 if (!err && desc->suite.aead.dec.vecs)
1182 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1183 desc->suite.aead.dec.count);
1184
1185out:
1186 crypto_free_aead(tfm);
1187 return err;
1188}
1189
1190static int alg_test_cipher(const struct alg_test_desc *desc,
1191 const char *driver, u32 type, u32 mask)
1192{
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001193 struct crypto_cipher *tfm;
Herbert Xuda7f0332008-07-31 17:08:25 +08001194 int err = 0;
1195
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001196 tfm = crypto_alloc_cipher(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001197 if (IS_ERR(tfm)) {
1198 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1199 "%s: %ld\n", driver, PTR_ERR(tfm));
1200 return PTR_ERR(tfm);
1201 }
1202
1203 if (desc->suite.cipher.enc.vecs) {
1204 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1205 desc->suite.cipher.enc.count);
1206 if (err)
1207 goto out;
1208 }
1209
1210 if (desc->suite.cipher.dec.vecs)
1211 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1212 desc->suite.cipher.dec.count);
1213
1214out:
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001215 crypto_free_cipher(tfm);
1216 return err;
1217}
1218
1219static int alg_test_skcipher(const struct alg_test_desc *desc,
1220 const char *driver, u32 type, u32 mask)
1221{
1222 struct crypto_ablkcipher *tfm;
1223 int err = 0;
1224
1225 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1226 if (IS_ERR(tfm)) {
1227 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1228 "%s: %ld\n", driver, PTR_ERR(tfm));
1229 return PTR_ERR(tfm);
1230 }
1231
1232 if (desc->suite.cipher.enc.vecs) {
1233 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1234 desc->suite.cipher.enc.count);
1235 if (err)
1236 goto out;
1237 }
1238
1239 if (desc->suite.cipher.dec.vecs)
1240 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1241 desc->suite.cipher.dec.count);
1242
1243out:
Herbert Xuda7f0332008-07-31 17:08:25 +08001244 crypto_free_ablkcipher(tfm);
1245 return err;
1246}
1247
1248static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1249 u32 type, u32 mask)
1250{
1251 struct crypto_comp *tfm;
1252 int err;
1253
1254 tfm = crypto_alloc_comp(driver, type, mask);
1255 if (IS_ERR(tfm)) {
1256 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1257 "%ld\n", driver, PTR_ERR(tfm));
1258 return PTR_ERR(tfm);
1259 }
1260
1261 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1262 desc->suite.comp.decomp.vecs,
1263 desc->suite.comp.comp.count,
1264 desc->suite.comp.decomp.count);
1265
1266 crypto_free_comp(tfm);
1267 return err;
1268}
1269
Geert Uytterhoeven8064efb2009-03-04 15:08:03 +08001270static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1271 u32 type, u32 mask)
1272{
1273 struct crypto_pcomp *tfm;
1274 int err;
1275
1276 tfm = crypto_alloc_pcomp(driver, type, mask);
1277 if (IS_ERR(tfm)) {
1278 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1279 driver, PTR_ERR(tfm));
1280 return PTR_ERR(tfm);
1281 }
1282
1283 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1284 desc->suite.pcomp.decomp.vecs,
1285 desc->suite.pcomp.comp.count,
1286 desc->suite.pcomp.decomp.count);
1287
1288 crypto_free_pcomp(tfm);
1289 return err;
1290}
1291
Herbert Xuda7f0332008-07-31 17:08:25 +08001292static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1293 u32 type, u32 mask)
1294{
1295 struct crypto_ahash *tfm;
1296 int err;
1297
1298 tfm = crypto_alloc_ahash(driver, type, mask);
1299 if (IS_ERR(tfm)) {
1300 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1301 "%ld\n", driver, PTR_ERR(tfm));
1302 return PTR_ERR(tfm);
1303 }
1304
1305 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1306
1307 crypto_free_ahash(tfm);
1308 return err;
1309}
1310
Herbert Xu8e3ee852008-11-07 14:58:52 +08001311static int alg_test_crc32c(const struct alg_test_desc *desc,
1312 const char *driver, u32 type, u32 mask)
1313{
1314 struct crypto_shash *tfm;
1315 u32 val;
1316 int err;
1317
1318 err = alg_test_hash(desc, driver, type, mask);
1319 if (err)
1320 goto out;
1321
1322 tfm = crypto_alloc_shash(driver, type, mask);
1323 if (IS_ERR(tfm)) {
1324 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1325 "%ld\n", driver, PTR_ERR(tfm));
1326 err = PTR_ERR(tfm);
1327 goto out;
1328 }
1329
1330 do {
1331 struct {
1332 struct shash_desc shash;
1333 char ctx[crypto_shash_descsize(tfm)];
1334 } sdesc;
1335
1336 sdesc.shash.tfm = tfm;
1337 sdesc.shash.flags = 0;
1338
1339 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1340 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1341 if (err) {
1342 printk(KERN_ERR "alg: crc32c: Operation failed for "
1343 "%s: %d\n", driver, err);
1344 break;
1345 }
1346
1347 if (val != ~420553207) {
1348 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1349 "%d\n", driver, val);
1350 err = -EINVAL;
1351 }
1352 } while (0);
1353
1354 crypto_free_shash(tfm);
1355
1356out:
1357 return err;
1358}
1359
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001360static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1361 u32 type, u32 mask)
1362{
1363 struct crypto_rng *rng;
1364 int err;
1365
1366 rng = crypto_alloc_rng(driver, type, mask);
1367 if (IS_ERR(rng)) {
1368 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1369 "%ld\n", driver, PTR_ERR(rng));
1370 return PTR_ERR(rng);
1371 }
1372
1373 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1374
1375 crypto_free_rng(rng);
1376
1377 return err;
1378}
1379
/*
 * Master table of algorithm self-tests, consulted by alg_test().
 * alg_find_test() binary-searches this array, so entries MUST be kept
 * sorted by algorithm name.
 */
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "ansi_cprng",
		.test = alg_test_cprng,
		.suite = {
			.cprng = {
				.vecs = ansi_cprng_aes_tv_template,
				.count = ANSI_CPRNG_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_cbc_enc_tv_template,
					.count = AES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_cbc_dec_tv_template,
					.count = AES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_cbc_enc_tv_template,
					.count = ANUBIS_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_cbc_dec_tv_template,
					.count = ANUBIS_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_cbc_enc_tv_template,
					.count = BF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_cbc_dec_tv_template,
					.count = BF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_cbc_enc_tv_template,
					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_cbc_dec_tv_template,
					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_cbc_enc_tv_template,
					.count = DES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_cbc_dec_tv_template,
					.count = DES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_cbc_enc_tv_template,
					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_cbc_dec_tv_template,
					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_cbc_enc_tv_template,
					.count = TF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_cbc_dec_tv_template,
					.count = TF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_enc_tv_template,
					.count = AES_CCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_dec_tv_template,
					.count = AES_CCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		/* crc32c also gets an extra seeded-context shash check */
		.alg = "crc32c",
		.test = alg_test_crc32c,
		.suite = {
			.hash = {
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_enc_tv_template,
					.count = AES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_dec_tv_template,
					.count = AES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		/* salsa20 is a stream cipher; only encryption vectors exist */
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "zlib",
		.test = alg_test_pcomp,
		.suite = {
			.pcomp = {
				.comp = {
					.vecs = zlib_comp_tv_template,
					.count = ZLIB_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = zlib_decomp_tv_template,
					.count = ZLIB_DECOMP_TEST_VECTORS
				}
			}
		}
	}
};
2181
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002182static int alg_find_test(const char *alg)
Herbert Xuda7f0332008-07-31 17:08:25 +08002183{
2184 int start = 0;
2185 int end = ARRAY_SIZE(alg_test_descs);
2186
2187 while (start < end) {
2188 int i = (start + end) / 2;
2189 int diff = strcmp(alg_test_descs[i].alg, alg);
2190
2191 if (diff > 0) {
2192 end = i;
2193 continue;
2194 }
2195
2196 if (diff < 0) {
2197 start = i + 1;
2198 continue;
2199 }
2200
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002201 return i;
Herbert Xuda7f0332008-07-31 17:08:25 +08002202 }
2203
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002204 return -1;
2205}
2206
2207int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2208{
2209 int i;
Neil Hormand12d6b62008-10-12 20:36:51 +08002210 int rc;
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002211
2212 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
2213 char nalg[CRYPTO_MAX_ALG_NAME];
2214
2215 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2216 sizeof(nalg))
2217 return -ENAMETOOLONG;
2218
2219 i = alg_find_test(nalg);
2220 if (i < 0)
2221 goto notest;
2222
2223 return alg_test_cipher(alg_test_descs + i, driver, type, mask);
2224 }
2225
2226 i = alg_find_test(alg);
2227 if (i < 0)
2228 goto notest;
2229
Neil Hormand12d6b62008-10-12 20:36:51 +08002230 rc = alg_test_descs[i].test(alg_test_descs + i, driver,
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002231 type, mask);
Neil Hormand12d6b62008-10-12 20:36:51 +08002232 if (fips_enabled && rc)
2233 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2234
2235 return rc;
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002236
2237notest:
Herbert Xuda7f0332008-07-31 17:08:25 +08002238 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2239 return 0;
2240}
2241EXPORT_SYMBOL_GPL(alg_test);
2242
2243int __init testmgr_init(void)
2244{
2245 int i;
2246
2247 for (i = 0; i < XBUFSIZE; i++) {
2248 xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
2249 if (!xbuf[i])
2250 goto err_free_xbuf;
2251 }
2252
2253 for (i = 0; i < XBUFSIZE; i++) {
2254 axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
2255 if (!axbuf[i])
2256 goto err_free_axbuf;
2257 }
2258
2259 return 0;
2260
2261err_free_axbuf:
2262 for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
2263 free_page((unsigned long)axbuf[i]);
2264err_free_xbuf:
2265 for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
2266 free_page((unsigned long)xbuf[i]);
2267
2268 return -ENOMEM;
2269}
2270
2271void testmgr_exit(void)
2272{
2273 int i;
2274
2275 for (i = 0; i < XBUFSIZE; i++)
2276 free_page((unsigned long)axbuf[i]);
2277 for (i = 0; i < XBUFSIZE; i++)
2278 free_page((unsigned long)xbuf[i]);
2279}