/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>

#include "internal.h"
#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static char *xbuf[XBUFSIZE];
static char *axbuf[XBUFSIZE];

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

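/*
 * Completion callback used for asynchronous requests: intermediate
 * -EINPROGRESS notifications are ignored; the final status is recorded
 * before waking up the waiting tester.
 */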
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

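/*
 * Run the digest test vectors through an ahash transform, first from a
 * single linear buffer and then (for vectors with np set) from a
 * scatterlist split across pages according to tap[].  Returns 0 on
 * success or a negative errno on the first failing vector.
 */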
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount)
{
134 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
135 unsigned int i, j, k, temp;
136 struct scatterlist sg[8];
137 char result[64];
138 struct ahash_request *req;
139 struct tcrypt_result tresult;
140 int ret;
141 void *hash_buff;
142
143 init_completion(&tresult.completion);
144
145 req = ahash_request_alloc(tfm, GFP_KERNEL);
146 if (!req) {
147 printk(KERN_ERR "alg: hash: Failed to allocate request for "
148 "%s\n", algo);
149 ret = -ENOMEM;
150 goto out_noreq;
151 }
152 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
153 tcrypt_complete, &tresult);
154
155 for (i = 0; i < tcount; i++) {
156 memset(result, 0, 64);
157
158 hash_buff = xbuf[0];
159
160 memcpy(hash_buff, template[i].plaintext, template[i].psize);
161 sg_init_one(&sg[0], hash_buff, template[i].psize);
162
163 if (template[i].ksize) {
164 crypto_ahash_clear_flags(tfm, ~0);
165 ret = crypto_ahash_setkey(tfm, template[i].key,
166 template[i].ksize);
167 if (ret) {
168 printk(KERN_ERR "alg: hash: setkey failed on "
169 "test %d for %s: ret=%d\n", i + 1, algo,
170 -ret);
171 goto out;
172 }
173 }
174
175 ahash_request_set_crypt(req, sg, result, template[i].psize);
176 ret = crypto_ahash_digest(req);
177 switch (ret) {
178 case 0:
179 break;
180 case -EINPROGRESS:
181 case -EBUSY:
182 ret = wait_for_completion_interruptible(
183 &tresult.completion);
184 if (!ret && !(ret = tresult.err)) {
185 INIT_COMPLETION(tresult.completion);
186 break;
187 }
188 /* fall through */
189 default:
190 printk(KERN_ERR "alg: hash: digest failed on test %d "
191 "for %s: ret=%d\n", i + 1, algo, -ret);
192 goto out;
193 }
194
195 if (memcmp(result, template[i].digest,
196 crypto_ahash_digestsize(tfm))) {
197 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
198 i + 1, algo);
199 hexdump(result, crypto_ahash_digestsize(tfm));
200 ret = -EINVAL;
201 goto out;
202 }
203 }
204
205 j = 0;
206 for (i = 0; i < tcount; i++) {
207 if (template[i].np) {
208 j++;
209 memset(result, 0, 64);
210
211 temp = 0;
212 sg_init_table(sg, template[i].np);
213 for (k = 0; k < template[i].np; k++) {
214 sg_set_buf(&sg[k],
215 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
216 offset_in_page(IDX[k]),
217 template[i].plaintext + temp,
218 template[i].tap[k]),
219 template[i].tap[k]);
220 temp += template[i].tap[k];
221 }
222
223 if (template[i].ksize) {
224 crypto_ahash_clear_flags(tfm, ~0);
225 ret = crypto_ahash_setkey(tfm, template[i].key,
226 template[i].ksize);
227
228 if (ret) {
229 printk(KERN_ERR "alg: hash: setkey "
230 "failed on chunking test %d "
231 "for %s: ret=%d\n", j, algo,
232 -ret);
233 goto out;
234 }
235 }
236
237 ahash_request_set_crypt(req, sg, result,
238 template[i].psize);
239 ret = crypto_ahash_digest(req);
240 switch (ret) {
241 case 0:
242 break;
243 case -EINPROGRESS:
244 case -EBUSY:
245 ret = wait_for_completion_interruptible(
246 &tresult.completion);
247 if (!ret && !(ret = tresult.err)) {
248 INIT_COMPLETION(tresult.completion);
249 break;
250 }
251 /* fall through */
252 default:
253 printk(KERN_ERR "alg: hash: digest failed "
254 "on chunking test %d for %s: "
255 "ret=%d\n", j, algo, -ret);
256 goto out;
257 }
258
259 if (memcmp(result, template[i].digest,
260 crypto_ahash_digestsize(tfm))) {
261 printk(KERN_ERR "alg: hash: Chunking test %d "
262 "failed for %s\n", j, algo);
263 hexdump(result, crypto_ahash_digestsize(tfm));
264 ret = -EINVAL;
265 goto out;
266 }
267 }
268 }
269
270 ret = 0;
271
272out:
273 ahash_request_free(req);
274out_noreq:
275 return ret;
276}
277
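/*
 * Run the AEAD test vectors for one direction (ENCRYPT or DECRYPT),
 * first with linear buffers and then with page-crossing scatterlists.
 * Vectors marked novrfy are expected to fail verification with
 * -EBADMSG.
 */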
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
281 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
282 unsigned int i, j, k, n, temp;
283 int ret = 0;
284 char *q;
285 char *key;
286 struct aead_request *req;
287 struct scatterlist sg[8];
288 struct scatterlist asg[8];
289 const char *e;
290 struct tcrypt_result result;
291 unsigned int authsize;
292 void *input;
293 void *assoc;
294 char iv[MAX_IVLEN];
295
296 if (enc == ENCRYPT)
297 e = "encryption";
298 else
299 e = "decryption";
300
301 init_completion(&result.completion);
302
303 req = aead_request_alloc(tfm, GFP_KERNEL);
304 if (!req) {
305 printk(KERN_ERR "alg: aead: Failed to allocate request for "
306 "%s\n", algo);
307 ret = -ENOMEM;
308 goto out;
309 }
310
311 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
312 tcrypt_complete, &result);
313
314 for (i = 0, j = 0; i < tcount; i++) {
315 if (!template[i].np) {
316 j++;
317
			/* some templates have no input data but they will
			 * touch input
			 */
321 input = xbuf[0];
322 assoc = axbuf[0];
323
324 memcpy(input, template[i].input, template[i].ilen);
325 memcpy(assoc, template[i].assoc, template[i].alen);
326 if (template[i].iv)
327 memcpy(iv, template[i].iv, MAX_IVLEN);
328 else
329 memset(iv, 0, MAX_IVLEN);
330
331 crypto_aead_clear_flags(tfm, ~0);
332 if (template[i].wk)
333 crypto_aead_set_flags(
334 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
335
336 key = template[i].key;
337
338 ret = crypto_aead_setkey(tfm, key,
339 template[i].klen);
340 if (!ret == template[i].fail) {
341 printk(KERN_ERR "alg: aead: setkey failed on "
342 "test %d for %s: flags=%x\n", j, algo,
343 crypto_aead_get_flags(tfm));
344 goto out;
345 } else if (ret)
346 continue;
347
348 authsize = abs(template[i].rlen - template[i].ilen);
349 ret = crypto_aead_setauthsize(tfm, authsize);
350 if (ret) {
351 printk(KERN_ERR "alg: aead: Failed to set "
352 "authsize to %u on test %d for %s\n",
353 authsize, j, algo);
354 goto out;
355 }
356
357 sg_init_one(&sg[0], input,
358 template[i].ilen + (enc ? authsize : 0));
359
360 sg_init_one(&asg[0], assoc, template[i].alen);
361
362 aead_request_set_crypt(req, sg, sg,
363 template[i].ilen, iv);
364
365 aead_request_set_assoc(req, asg, template[i].alen);
366
367 ret = enc ?
368 crypto_aead_encrypt(req) :
369 crypto_aead_decrypt(req);
370
371 switch (ret) {
372 case 0:
				if (template[i].novrfy) {
374 /* verification was supposed to fail */
375 printk(KERN_ERR "alg: aead: %s failed "
376 "on test %d for %s: ret was 0, "
377 "expected -EBADMSG\n",
378 e, j, algo);
379 /* so really, we got a bad message */
380 ret = -EBADMSG;
381 goto out;
382 }
				break;
384 case -EINPROGRESS:
385 case -EBUSY:
386 ret = wait_for_completion_interruptible(
387 &result.completion);
388 if (!ret && !(ret = result.err)) {
389 INIT_COMPLETION(result.completion);
390 break;
391 }
			case -EBADMSG:
393 if (template[i].novrfy)
394 /* verification failure was expected */
395 continue;
				/* fall through */
397 default:
398 printk(KERN_ERR "alg: aead: %s failed on test "
399 "%d for %s: ret=%d\n", e, j, algo, -ret);
400 goto out;
401 }
402
403 q = input;
404 if (memcmp(q, template[i].result, template[i].rlen)) {
405 printk(KERN_ERR "alg: aead: Test %d failed on "
406 "%s for %s\n", j, e, algo);
407 hexdump(q, template[i].rlen);
408 ret = -EINVAL;
409 goto out;
410 }
411 }
412 }
413
414 for (i = 0, j = 0; i < tcount; i++) {
415 if (template[i].np) {
416 j++;
417
418 if (template[i].iv)
419 memcpy(iv, template[i].iv, MAX_IVLEN);
420 else
421 memset(iv, 0, MAX_IVLEN);
422
423 crypto_aead_clear_flags(tfm, ~0);
424 if (template[i].wk)
425 crypto_aead_set_flags(
426 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
427 key = template[i].key;
428
429 ret = crypto_aead_setkey(tfm, key, template[i].klen);
430 if (!ret == template[i].fail) {
431 printk(KERN_ERR "alg: aead: setkey failed on "
432 "chunk test %d for %s: flags=%x\n", j,
433 algo, crypto_aead_get_flags(tfm));
434 goto out;
435 } else if (ret)
436 continue;
437
438 authsize = abs(template[i].rlen - template[i].ilen);
439
440 ret = -EINVAL;
441 sg_init_table(sg, template[i].np);
442 for (k = 0, temp = 0; k < template[i].np; k++) {
443 if (WARN_ON(offset_in_page(IDX[k]) +
444 template[i].tap[k] > PAGE_SIZE))
445 goto out;
446
447 q = xbuf[IDX[k] >> PAGE_SHIFT] +
448 offset_in_page(IDX[k]);
449
450 memcpy(q, template[i].input + temp,
451 template[i].tap[k]);
452
453 n = template[i].tap[k];
454 if (k == template[i].np - 1 && enc)
455 n += authsize;
456 if (offset_in_page(q) + n < PAGE_SIZE)
457 q[n] = 0;
458
459 sg_set_buf(&sg[k], q, template[i].tap[k]);
460 temp += template[i].tap[k];
461 }
462
463 ret = crypto_aead_setauthsize(tfm, authsize);
464 if (ret) {
465 printk(KERN_ERR "alg: aead: Failed to set "
466 "authsize to %u on chunk test %d for "
467 "%s\n", authsize, j, algo);
468 goto out;
469 }
470
471 if (enc) {
472 if (WARN_ON(sg[k - 1].offset +
473 sg[k - 1].length + authsize >
474 PAGE_SIZE)) {
475 ret = -EINVAL;
476 goto out;
477 }
478
479 sg[k - 1].length += authsize;
480 }
481
482 sg_init_table(asg, template[i].anp);
483 for (k = 0, temp = 0; k < template[i].anp; k++) {
484 sg_set_buf(&asg[k],
485 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
486 offset_in_page(IDX[k]),
487 template[i].assoc + temp,
488 template[i].atap[k]),
489 template[i].atap[k]);
490 temp += template[i].atap[k];
491 }
492
493 aead_request_set_crypt(req, sg, sg,
494 template[i].ilen,
495 iv);
496
497 aead_request_set_assoc(req, asg, template[i].alen);
498
499 ret = enc ?
500 crypto_aead_encrypt(req) :
501 crypto_aead_decrypt(req);
502
503 switch (ret) {
504 case 0:
				if (template[i].novrfy) {
506 /* verification was supposed to fail */
507 printk(KERN_ERR "alg: aead: %s failed "
508 "on chunk test %d for %s: ret "
509 "was 0, expected -EBADMSG\n",
510 e, j, algo);
511 /* so really, we got a bad message */
512 ret = -EBADMSG;
513 goto out;
514 }
				break;
516 case -EINPROGRESS:
517 case -EBUSY:
518 ret = wait_for_completion_interruptible(
519 &result.completion);
520 if (!ret && !(ret = result.err)) {
521 INIT_COMPLETION(result.completion);
522 break;
523 }
			case -EBADMSG:
525 if (template[i].novrfy)
526 /* verification failure was expected */
527 continue;
				/* fall through */
529 default:
530 printk(KERN_ERR "alg: aead: %s failed on "
531 "chunk test %d for %s: ret=%d\n", e, j,
532 algo, -ret);
533 goto out;
534 }
535
536 ret = -EINVAL;
537 for (k = 0, temp = 0; k < template[i].np; k++) {
538 q = xbuf[IDX[k] >> PAGE_SHIFT] +
539 offset_in_page(IDX[k]);
540
541 n = template[i].tap[k];
542 if (k == template[i].np - 1)
543 n += enc ? authsize : -authsize;
544
545 if (memcmp(q, template[i].result + temp, n)) {
546 printk(KERN_ERR "alg: aead: Chunk "
547 "test %d failed on %s at page "
548 "%u for %s\n", j, e, k, algo);
549 hexdump(q, n);
550 goto out;
551 }
552
553 q += n;
554 if (k == template[i].np - 1 && !enc) {
555 if (memcmp(q, template[i].input +
556 temp + n, authsize))
557 n = authsize;
558 else
559 n = 0;
560 } else {
561 for (n = 0; offset_in_page(q + n) &&
562 q[n]; n++)
563 ;
564 }
565 if (n) {
566 printk(KERN_ERR "alg: aead: Result "
567 "buffer corruption in chunk "
568 "test %d on %s at page %u for "
569 "%s: %u bytes:\n", j, e, k,
570 algo, n);
571 hexdump(q, n);
572 goto out;
573 }
574
575 temp += template[i].tap[k];
576 }
577 }
578 }
579
580 ret = 0;
581
582out:
583 aead_request_free(req);
584 return ret;
585}
586
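/*
 * Run the single-block cipher test vectors by encrypting or decrypting
 * the input one block at a time; chunked (np) vectors are skipped here
 * and exercised through the skcipher path instead.
 */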
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
591 unsigned int i, j, k;
592 int ret;
593 char *q;
594 const char *e;
595 void *data;
596
597 if (enc == ENCRYPT)
598 e = "encryption";
599 else
600 e = "decryption";
601
602 j = 0;
603 for (i = 0; i < tcount; i++) {
604 if (template[i].np)
605 continue;
606
607 j++;
608
609 data = xbuf[0];
610 memcpy(data, template[i].input, template[i].ilen);
611
612 crypto_cipher_clear_flags(tfm, ~0);
613 if (template[i].wk)
614 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
615
616 ret = crypto_cipher_setkey(tfm, template[i].key,
617 template[i].klen);
618 if (!ret == template[i].fail) {
619 printk(KERN_ERR "alg: cipher: setkey failed "
620 "on test %d for %s: flags=%x\n", j,
621 algo, crypto_cipher_get_flags(tfm));
622 goto out;
623 } else if (ret)
624 continue;
625
626 for (k = 0; k < template[i].ilen;
627 k += crypto_cipher_blocksize(tfm)) {
628 if (enc)
629 crypto_cipher_encrypt_one(tfm, data + k,
630 data + k);
631 else
632 crypto_cipher_decrypt_one(tfm, data + k,
633 data + k);
634 }
635
636 q = data;
637 if (memcmp(q, template[i].result, template[i].rlen)) {
638 printk(KERN_ERR "alg: cipher: Test %d failed "
639 "on %s for %s\n", j, e, algo);
640 hexdump(q, template[i].rlen);
641 ret = -EINVAL;
642 goto out;
643 }
644 }
645
646 ret = 0;
647
648out:
649 return ret;
650}
651
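/*
 * Run the cipher test vectors through an ablkcipher transform, first
 * with a single-entry scatterlist and then with the input split across
 * pages according to the template's tap[] layout.
 */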
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
657 unsigned int i, j, k, n, temp;
658 int ret;
659 char *q;
660 struct ablkcipher_request *req;
661 struct scatterlist sg[8];
662 const char *e;
663 struct tcrypt_result result;
664 void *data;
665 char iv[MAX_IVLEN];
666
667 if (enc == ENCRYPT)
668 e = "encryption";
669 else
670 e = "decryption";
671
672 init_completion(&result.completion);
673
674 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
675 if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		ret = -ENOMEM;
679 goto out;
680 }
681
682 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
683 tcrypt_complete, &result);
684
685 j = 0;
686 for (i = 0; i < tcount; i++) {
687 if (template[i].iv)
688 memcpy(iv, template[i].iv, MAX_IVLEN);
689 else
690 memset(iv, 0, MAX_IVLEN);
691
692 if (!(template[i].np)) {
693 j++;
694
695 data = xbuf[0];
696 memcpy(data, template[i].input, template[i].ilen);
697
698 crypto_ablkcipher_clear_flags(tfm, ~0);
699 if (template[i].wk)
700 crypto_ablkcipher_set_flags(
701 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
702
703 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
704 template[i].klen);
705 if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
709 goto out;
710 } else if (ret)
711 continue;
712
713 sg_init_one(&sg[0], data, template[i].ilen);
714
715 ablkcipher_request_set_crypt(req, sg, sg,
716 template[i].ilen, iv);
717 ret = enc ?
718 crypto_ablkcipher_encrypt(req) :
719 crypto_ablkcipher_decrypt(req);
720
721 switch (ret) {
722 case 0:
723 break;
724 case -EINPROGRESS:
725 case -EBUSY:
726 ret = wait_for_completion_interruptible(
727 &result.completion);
728 if (!ret && !((ret = result.err))) {
729 INIT_COMPLETION(result.completion);
730 break;
731 }
732 /* fall through */
733 default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
737 goto out;
738 }
739
740 q = data;
741 if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
745 ret = -EINVAL;
746 goto out;
747 }
748 }
749 }
750
751 j = 0;
752 for (i = 0; i < tcount; i++) {
753
754 if (template[i].iv)
755 memcpy(iv, template[i].iv, MAX_IVLEN);
756 else
757 memset(iv, 0, MAX_IVLEN);
758
759 if (template[i].np) {
760 j++;
761
762 crypto_ablkcipher_clear_flags(tfm, ~0);
763 if (template[i].wk)
764 crypto_ablkcipher_set_flags(
765 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
766
767 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
768 template[i].klen);
769 if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
774 goto out;
775 } else if (ret)
776 continue;
777
778 temp = 0;
779 ret = -EINVAL;
780 sg_init_table(sg, template[i].np);
781 for (k = 0; k < template[i].np; k++) {
782 if (WARN_ON(offset_in_page(IDX[k]) +
783 template[i].tap[k] > PAGE_SIZE))
784 goto out;
785
786 q = xbuf[IDX[k] >> PAGE_SHIFT] +
787 offset_in_page(IDX[k]);
788
789 memcpy(q, template[i].input + temp,
790 template[i].tap[k]);
791
792 if (offset_in_page(q) + template[i].tap[k] <
793 PAGE_SIZE)
794 q[template[i].tap[k]] = 0;
795
796 sg_set_buf(&sg[k], q, template[i].tap[k]);
797
798 temp += template[i].tap[k];
799 }
800
801 ablkcipher_request_set_crypt(req, sg, sg,
802 template[i].ilen, iv);
803
804 ret = enc ?
805 crypto_ablkcipher_encrypt(req) :
806 crypto_ablkcipher_decrypt(req);
807
808 switch (ret) {
809 case 0:
810 break;
811 case -EINPROGRESS:
812 case -EBUSY:
813 ret = wait_for_completion_interruptible(
814 &result.completion);
815 if (!ret && !((ret = result.err))) {
816 INIT_COMPLETION(result.completion);
817 break;
818 }
819 /* fall through */
820 default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
824 goto out;
825 }
826
827 temp = 0;
828 ret = -EINVAL;
829 for (k = 0; k < template[i].np; k++) {
830 q = xbuf[IDX[k] >> PAGE_SHIFT] +
831 offset_in_page(IDX[k]);
832
833 if (memcmp(q, template[i].result + temp,
834 template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
838 hexdump(q, template[i].tap[k]);
839 goto out;
840 }
841
842 q += template[i].tap[k];
843 for (n = 0; offset_in_page(q + n) && q[n]; n++)
844 ;
845 if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
851 hexdump(q, n);
852 goto out;
853 }
854 temp += template[i].tap[k];
855 }
856 }
857 }
858
859 ret = 0;
860
861out:
862 ablkcipher_request_free(req);
863 return ret;
864}
865
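/*
 * Run the synchronous compression and decompression vectors through a
 * crypto_comp transform, checking both the output length and the
 * output data.
 */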
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
870 unsigned int i;
871 char result[COMP_BUF_SIZE];
872 int ret;
873
874 for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));
879
880 ilen = ctemplate[i].inlen;
881 ret = crypto_comp_compress(tfm, ctemplate[i].input,
882 ilen, result, &dlen);
883 if (ret) {
884 printk(KERN_ERR "alg: comp: compression failed "
885 "on test %d for %s: ret=%d\n", i + 1, algo,
886 -ret);
887 goto out;
888 }
889
		if (dlen != ctemplate[i].outlen) {
891 printk(KERN_ERR "alg: comp: Compression test %d "
892 "failed for %s: output len = %d\n", i + 1, algo,
893 dlen);
894 ret = -EINVAL;
895 goto out;
896 }
897
		if (memcmp(result, ctemplate[i].output, dlen)) {
899 printk(KERN_ERR "alg: comp: Compression test %d "
900 "failed for %s\n", i + 1, algo);
901 hexdump(result, dlen);
902 ret = -EINVAL;
903 goto out;
904 }
905 }
906
907 for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));
912
913 ilen = dtemplate[i].inlen;
914 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
915 ilen, result, &dlen);
916 if (ret) {
917 printk(KERN_ERR "alg: comp: decompression failed "
918 "on test %d for %s: ret=%d\n", i + 1, algo,
919 -ret);
920 goto out;
921 }
922
		if (dlen != dtemplate[i].outlen) {
924 printk(KERN_ERR "alg: comp: Decompression test %d "
925 "failed for %s: output len = %d\n", i + 1, algo,
926 dlen);
927 ret = -EINVAL;
928 goto out;
929 }
930
		if (memcmp(result, dtemplate[i].output, dlen)) {
932 printk(KERN_ERR "alg: comp: Decompression test %d "
933 "failed for %s\n", i + 1, algo);
934 hexdump(result, dlen);
935 ret = -EINVAL;
936 goto out;
937 }
938 }
939
940 ret = 0;
941
942out:
943 return ret;
944}
945
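/*
 * Exercise a partial (streaming) compression transform: each vector is
 * fed through setup/init/update/final in two halves, with the output
 * buffer also made available in two steps, and the final output is
 * compared against the expected data.
 */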
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
952 unsigned int i;
953 char result[COMP_BUF_SIZE];
954 int error;
955
956 for (i = 0; i < ctcount; i++) {
957 struct comp_request req;
958
959 error = crypto_compress_setup(tfm, ctemplate[i].params,
960 ctemplate[i].paramsize);
961 if (error) {
962 pr_err("alg: pcomp: compression setup failed on test "
963 "%d for %s: error=%d\n", i + 1, algo, error);
964 return error;
965 }
966
967 error = crypto_compress_init(tfm);
968 if (error) {
969 pr_err("alg: pcomp: compression init failed on test "
970 "%d for %s: error=%d\n", i + 1, algo, error);
971 return error;
972 }
973
974 memset(result, 0, sizeof(result));
975
976 req.next_in = ctemplate[i].input;
977 req.avail_in = ctemplate[i].inlen / 2;
978 req.next_out = result;
979 req.avail_out = ctemplate[i].outlen / 2;
980
981 error = crypto_compress_update(tfm, &req);
982 if (error && (error != -EAGAIN || req.avail_in)) {
983 pr_err("alg: pcomp: compression update failed on test "
984 "%d for %s: error=%d\n", i + 1, algo, error);
985 return error;
986 }
987
988 /* Add remaining input data */
989 req.avail_in += (ctemplate[i].inlen + 1) / 2;
990
991 error = crypto_compress_update(tfm, &req);
992 if (error && (error != -EAGAIN || req.avail_in)) {
993 pr_err("alg: pcomp: compression update failed on test "
994 "%d for %s: error=%d\n", i + 1, algo, error);
995 return error;
996 }
997
998 /* Provide remaining output space */
999 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1000
1001 error = crypto_compress_final(tfm, &req);
1002 if (error) {
1003 pr_err("alg: pcomp: compression final failed on test "
1004 "%d for %s: error=%d\n", i + 1, algo, error);
1005 return error;
1006 }
1007
1008 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1009 pr_err("alg: comp: Compression test %d failed for %s: "
1010 "output len = %d (expected %d)\n", i + 1, algo,
1011 COMP_BUF_SIZE - req.avail_out,
1012 ctemplate[i].outlen);
1013 return -EINVAL;
1014 }
1015
1016 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1017 pr_err("alg: pcomp: Compression test %d failed for "
1018 "%s\n", i + 1, algo);
1019 hexdump(result, ctemplate[i].outlen);
1020 return -EINVAL;
1021 }
1022 }
1023
1024 for (i = 0; i < dtcount; i++) {
1025 struct comp_request req;
1026
1027 error = crypto_decompress_setup(tfm, dtemplate[i].params,
1028 dtemplate[i].paramsize);
1029 if (error) {
1030 pr_err("alg: pcomp: decompression setup failed on "
1031 "test %d for %s: error=%d\n", i + 1, algo,
1032 error);
1033 return error;
1034 }
1035
1036 error = crypto_decompress_init(tfm);
1037 if (error) {
1038 pr_err("alg: pcomp: decompression init failed on test "
1039 "%d for %s: error=%d\n", i + 1, algo, error);
1040 return error;
1041 }
1042
1043 memset(result, 0, sizeof(result));
1044
1045 req.next_in = dtemplate[i].input;
1046 req.avail_in = dtemplate[i].inlen / 2;
1047 req.next_out = result;
1048 req.avail_out = dtemplate[i].outlen / 2;
1049
1050 error = crypto_decompress_update(tfm, &req);
1051 if (error && (error != -EAGAIN || req.avail_in)) {
1052 pr_err("alg: pcomp: decompression update failed on "
1053 "test %d for %s: error=%d\n", i + 1, algo,
1054 error);
1055 return error;
1056 }
1057
1058 /* Add remaining input data */
1059 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1060
1061 error = crypto_decompress_update(tfm, &req);
1062 if (error && (error != -EAGAIN || req.avail_in)) {
1063 pr_err("alg: pcomp: decompression update failed on "
1064 "test %d for %s: error=%d\n", i + 1, algo,
1065 error);
1066 return error;
1067 }
1068
1069 /* Provide remaining output space */
1070 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1071
1072 error = crypto_decompress_final(tfm, &req);
1073 if (error && (error != -EAGAIN || req.avail_in)) {
1074 pr_err("alg: pcomp: decompression final failed on "
1075 "test %d for %s: error=%d\n", i + 1, algo,
1076 error);
1077 return error;
1078 }
1079
1080 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1081 pr_err("alg: comp: Decompression test %d failed for "
1082 "%s: output len = %d (expected %d)\n", i + 1,
1083 algo, COMP_BUF_SIZE - req.avail_out,
1084 dtemplate[i].outlen);
1085 return -EINVAL;
1086 }
1087
1088 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1089 pr_err("alg: pcomp: Decompression test %d failed for "
1090 "%s\n", i + 1, algo);
1091 hexdump(result, dtemplate[i].outlen);
1092 return -EINVAL;
1093 }
1094 }
1095
1096 return 0;
1097}
1098
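/*
 * Test a deterministic CPRNG: seed it with the template's V, key and DT
 * values, read the requested number of bytes the requested number of
 * times and compare the last block against the expected output.
 */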
static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1104 int err, i, j, seedsize;
1105 u8 *seed;
1106 char result[32];
1107
1108 seedsize = crypto_rng_seedsize(tfm);
1109
1110 seed = kmalloc(seedsize, GFP_KERNEL);
1111 if (!seed) {
1112 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1113 "for %s\n", algo);
1114 return -ENOMEM;
1115 }
1116
1117 for (i = 0; i < tcount; i++) {
1118 memset(result, 0, 32);
1119
1120 memcpy(seed, template[i].v, template[i].vlen);
1121 memcpy(seed + template[i].vlen, template[i].key,
1122 template[i].klen);
1123 memcpy(seed + template[i].vlen + template[i].klen,
1124 template[i].dt, template[i].dtlen);
1125
1126 err = crypto_rng_reset(tfm, seed, seedsize);
1127 if (err) {
1128 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1129 "for %s\n", algo);
1130 goto out;
1131 }
1132
1133 for (j = 0; j < template[i].loops; j++) {
1134 err = crypto_rng_get_bytes(tfm, result,
1135 template[i].rlen);
1136 if (err != template[i].rlen) {
1137 printk(KERN_ERR "alg: cprng: Failed to obtain "
1138 "the correct amount of random data for "
1139 "%s (requested %d, got %d)\n", algo,
1140 template[i].rlen, err);
1141 goto out;
1142 }
1143 }
1144
1145 err = memcmp(result, template[i].result,
1146 template[i].rlen);
1147 if (err) {
1148 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1149 i, algo);
1150 hexdump(result, template[i].rlen);
1151 err = -EINVAL;
1152 goto out;
1153 }
1154 }
1155
1156out:
1157 kfree(seed);
1158 return err;
1159}
1160
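/*
 * Allocate the AEAD implementation named by "driver" and run the
 * encryption and decryption vector sets defined for it, if any.
 */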
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
1164 struct crypto_aead *tfm;
1165 int err = 0;
1166
1167 tfm = crypto_alloc_aead(driver, type, mask);
1168 if (IS_ERR(tfm)) {
1169 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1170 "%ld\n", driver, PTR_ERR(tfm));
1171 return PTR_ERR(tfm);
1172 }
1173
1174 if (desc->suite.aead.enc.vecs) {
1175 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1176 desc->suite.aead.enc.count);
1177 if (err)
1178 goto out;
1179 }
1180
1181 if (!err && desc->suite.aead.dec.vecs)
1182 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1183 desc->suite.aead.dec.count);
1184
1185out:
1186 crypto_free_aead(tfm);
1187 return err;
1188}
1189
1190static int alg_test_cipher(const struct alg_test_desc *desc,
1191 const char *driver, u32 type, u32 mask)
1192{
	struct crypto_cipher *tfm;
	int err = 0;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
1198 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1199 "%s: %ld\n", driver, PTR_ERR(tfm));
1200 return PTR_ERR(tfm);
1201 }
1202
1203 if (desc->suite.cipher.enc.vecs) {
1204 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1205 desc->suite.cipher.enc.count);
1206 if (err)
1207 goto out;
1208 }
1209
1210 if (desc->suite.cipher.dec.vecs)
1211 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1212 desc->suite.cipher.dec.count);
1213
1214out:
	crypto_free_cipher(tfm);
1216 return err;
1217}
1218
1219static int alg_test_skcipher(const struct alg_test_desc *desc,
1220 const char *driver, u32 type, u32 mask)
1221{
1222 struct crypto_ablkcipher *tfm;
1223 int err = 0;
1224
1225 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1226 if (IS_ERR(tfm)) {
1227 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1228 "%s: %ld\n", driver, PTR_ERR(tfm));
1229 return PTR_ERR(tfm);
1230 }
1231
1232 if (desc->suite.cipher.enc.vecs) {
1233 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1234 desc->suite.cipher.enc.count);
1235 if (err)
1236 goto out;
1237 }
1238
1239 if (desc->suite.cipher.dec.vecs)
1240 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1241 desc->suite.cipher.dec.count);
1242
1243out:
	crypto_free_ablkcipher(tfm);
1245 return err;
1246}
1247
1248static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1249 u32 type, u32 mask)
1250{
1251 struct crypto_comp *tfm;
1252 int err;
1253
1254 tfm = crypto_alloc_comp(driver, type, mask);
1255 if (IS_ERR(tfm)) {
1256 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1257 "%ld\n", driver, PTR_ERR(tfm));
1258 return PTR_ERR(tfm);
1259 }
1260
1261 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1262 desc->suite.comp.decomp.vecs,
1263 desc->suite.comp.comp.count,
1264 desc->suite.comp.decomp.count);
1265
1266 crypto_free_comp(tfm);
1267 return err;
1268}
1269
static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1271 u32 type, u32 mask)
1272{
1273 struct crypto_pcomp *tfm;
1274 int err;
1275
1276 tfm = crypto_alloc_pcomp(driver, type, mask);
1277 if (IS_ERR(tfm)) {
1278 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1279 driver, PTR_ERR(tfm));
1280 return PTR_ERR(tfm);
1281 }
1282
1283 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1284 desc->suite.pcomp.decomp.vecs,
1285 desc->suite.pcomp.comp.count,
1286 desc->suite.pcomp.decomp.count);
1287
1288 crypto_free_pcomp(tfm);
1289 return err;
1290}
1291
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1293 u32 type, u32 mask)
1294{
1295 struct crypto_ahash *tfm;
1296 int err;
1297
1298 tfm = crypto_alloc_ahash(driver, type, mask);
1299 if (IS_ERR(tfm)) {
1300 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1301 "%ld\n", driver, PTR_ERR(tfm));
1302 return PTR_ERR(tfm);
1303 }
1304
1305 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1306
1307 crypto_free_ahash(tfm);
1308 return err;
1309}
1310
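/*
 * crc32c gets the generic hash tests plus an extra shash check that
 * seeding the descriptor context directly produces the expected final
 * value.
 */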
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
1314 struct crypto_shash *tfm;
1315 u32 val;
1316 int err;
1317
1318 err = alg_test_hash(desc, driver, type, mask);
1319 if (err)
1320 goto out;
1321
1322 tfm = crypto_alloc_shash(driver, type, mask);
1323 if (IS_ERR(tfm)) {
1324 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1325 "%ld\n", driver, PTR_ERR(tfm));
1326 err = PTR_ERR(tfm);
1327 goto out;
1328 }
1329
1330 do {
1331 struct {
1332 struct shash_desc shash;
1333 char ctx[crypto_shash_descsize(tfm)];
1334 } sdesc;
1335
1336 sdesc.shash.tfm = tfm;
1337 sdesc.shash.flags = 0;
1338
1339 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1340 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1341 if (err) {
1342 printk(KERN_ERR "alg: crc32c: Operation failed for "
1343 "%s: %d\n", driver, err);
1344 break;
1345 }
1346
1347 if (val != ~420553207) {
1348 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1349 "%d\n", driver, val);
1350 err = -EINVAL;
1351 }
1352 } while (0);
1353
1354 crypto_free_shash(tfm);
1355
1356out:
1357 return err;
1358}
1359
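/*
 * Allocate the RNG implementation named by "driver" and run the
 * deterministic CPRNG vectors against it.
 */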
static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
1363 struct crypto_rng *rng;
1364 int err;
1365
1366 rng = crypto_alloc_rng(driver, type, mask);
1367 if (IS_ERR(rng)) {
1368 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1369 "%ld\n", driver, PTR_ERR(rng));
1370 return PTR_ERR(rng);
1371 }
1372
1373 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1374
1375 crypto_free_rng(rng);
1376
1377 return err;
1378}
1379
/* Please keep this list sorted by algorithm name. */
1381static const struct alg_test_desc alg_test_descs[] = {
1382 {
1383 .alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.suite = {
1386 .cipher = {
1387 .enc = {
1388 .vecs = aes_cbc_enc_tv_template,
1389 .count = AES_CBC_ENC_TEST_VECTORS
1390 },
1391 .dec = {
1392 .vecs = aes_cbc_dec_tv_template,
1393 .count = AES_CBC_DEC_TEST_VECTORS
1394 }
1395 }
1396 }
1397 }, {
1398 .alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
1401 .cipher = {
1402 .enc = {
1403 .vecs = anubis_cbc_enc_tv_template,
1404 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1405 },
1406 .dec = {
1407 .vecs = anubis_cbc_dec_tv_template,
1408 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1409 }
1410 }
1411 }
1412 }, {
1413 .alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
1416 .cipher = {
1417 .enc = {
1418 .vecs = bf_cbc_enc_tv_template,
1419 .count = BF_CBC_ENC_TEST_VECTORS
1420 },
1421 .dec = {
1422 .vecs = bf_cbc_dec_tv_template,
1423 .count = BF_CBC_DEC_TEST_VECTORS
1424 }
1425 }
1426 }
1427 }, {
1428 .alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
1431 .cipher = {
1432 .enc = {
1433 .vecs = camellia_cbc_enc_tv_template,
1434 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1435 },
1436 .dec = {
1437 .vecs = camellia_cbc_dec_tv_template,
1438 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1439 }
1440 }
1441 }
1442 }, {
1443 .alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
1446 .cipher = {
1447 .enc = {
1448 .vecs = des_cbc_enc_tv_template,
1449 .count = DES_CBC_ENC_TEST_VECTORS
1450 },
1451 .dec = {
1452 .vecs = des_cbc_dec_tv_template,
1453 .count = DES_CBC_DEC_TEST_VECTORS
1454 }
1455 }
1456 }
1457 }, {
1458 .alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
1461 .cipher = {
1462 .enc = {
1463 .vecs = des3_ede_cbc_enc_tv_template,
1464 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1465 },
1466 .dec = {
1467 .vecs = des3_ede_cbc_dec_tv_template,
1468 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1469 }
1470 }
1471 }
1472 }, {
1473 .alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
1476 .cipher = {
1477 .enc = {
1478 .vecs = tf_cbc_enc_tv_template,
1479 .count = TF_CBC_ENC_TEST_VECTORS
1480 },
1481 .dec = {
1482 .vecs = tf_cbc_dec_tv_template,
1483 .count = TF_CBC_DEC_TEST_VECTORS
1484 }
1485 }
1486 }
1487 }, {
1488 .alg = "ccm(aes)",
1489 .test = alg_test_aead,
1490 .suite = {
1491 .aead = {
1492 .enc = {
1493 .vecs = aes_ccm_enc_tv_template,
1494 .count = AES_CCM_ENC_TEST_VECTORS
1495 },
1496 .dec = {
1497 .vecs = aes_ccm_dec_tv_template,
1498 .count = AES_CCM_DEC_TEST_VECTORS
1499 }
1500 }
1501 }
1502 }, {
1503 .alg = "crc32c",
		.test = alg_test_crc32c,
		.suite = {
1506 .hash = {
1507 .vecs = crc32c_tv_template,
1508 .count = CRC32C_TEST_VECTORS
1509 }
1510 }
1511 }, {
1512 .alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
1515 .cipher = {
1516 .enc = {
1517 .vecs = cts_mode_enc_tv_template,
1518 .count = CTS_MODE_ENC_TEST_VECTORS
1519 },
1520 .dec = {
1521 .vecs = cts_mode_dec_tv_template,
1522 .count = CTS_MODE_DEC_TEST_VECTORS
1523 }
1524 }
1525 }
1526 }, {
1527 .alg = "deflate",
1528 .test = alg_test_comp,
1529 .suite = {
1530 .comp = {
1531 .comp = {
1532 .vecs = deflate_comp_tv_template,
1533 .count = DEFLATE_COMP_TEST_VECTORS
1534 },
1535 .decomp = {
1536 .vecs = deflate_decomp_tv_template,
1537 .count = DEFLATE_DECOMP_TEST_VECTORS
1538 }
1539 }
1540 }
1541 }, {
1542 .alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.suite = {
1545 .cipher = {
1546 .enc = {
1547 .vecs = aes_enc_tv_template,
1548 .count = AES_ENC_TEST_VECTORS
1549 },
1550 .dec = {
1551 .vecs = aes_dec_tv_template,
1552 .count = AES_DEC_TEST_VECTORS
1553 }
1554 }
1555 }
1556 }, {
1557 .alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
1560 .cipher = {
1561 .enc = {
1562 .vecs = anubis_enc_tv_template,
1563 .count = ANUBIS_ENC_TEST_VECTORS
1564 },
1565 .dec = {
1566 .vecs = anubis_dec_tv_template,
1567 .count = ANUBIS_DEC_TEST_VECTORS
1568 }
1569 }
1570 }
1571 }, {
1572 .alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
1575 .cipher = {
1576 .enc = {
1577 .vecs = arc4_enc_tv_template,
1578 .count = ARC4_ENC_TEST_VECTORS
1579 },
1580 .dec = {
1581 .vecs = arc4_dec_tv_template,
1582 .count = ARC4_DEC_TEST_VECTORS
1583 }
1584 }
1585 }
1586 }, {
1587 .alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
1590 .cipher = {
1591 .enc = {
1592 .vecs = bf_enc_tv_template,
1593 .count = BF_ENC_TEST_VECTORS
1594 },
1595 .dec = {
1596 .vecs = bf_dec_tv_template,
1597 .count = BF_DEC_TEST_VECTORS
1598 }
1599 }
1600 }
1601 }, {
1602 .alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
1605 .cipher = {
1606 .enc = {
1607 .vecs = camellia_enc_tv_template,
1608 .count = CAMELLIA_ENC_TEST_VECTORS
1609 },
1610 .dec = {
1611 .vecs = camellia_dec_tv_template,
1612 .count = CAMELLIA_DEC_TEST_VECTORS
1613 }
1614 }
1615 }
1616 }, {
1617 .alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
1620 .cipher = {
1621 .enc = {
1622 .vecs = cast5_enc_tv_template,
1623 .count = CAST5_ENC_TEST_VECTORS
1624 },
1625 .dec = {
1626 .vecs = cast5_dec_tv_template,
1627 .count = CAST5_DEC_TEST_VECTORS
1628 }
1629 }
1630 }
1631 }, {
1632 .alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
1635 .cipher = {
1636 .enc = {
1637 .vecs = cast6_enc_tv_template,
1638 .count = CAST6_ENC_TEST_VECTORS
1639 },
1640 .dec = {
1641 .vecs = cast6_dec_tv_template,
1642 .count = CAST6_DEC_TEST_VECTORS
1643 }
1644 }
1645 }
1646 }, {
1647 .alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
1650 .cipher = {
1651 .enc = {
1652 .vecs = des_enc_tv_template,
1653 .count = DES_ENC_TEST_VECTORS
1654 },
1655 .dec = {
1656 .vecs = des_dec_tv_template,
1657 .count = DES_DEC_TEST_VECTORS
1658 }
1659 }
1660 }
1661 }, {
1662 .alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
1665 .cipher = {
1666 .enc = {
1667 .vecs = des3_ede_enc_tv_template,
1668 .count = DES3_EDE_ENC_TEST_VECTORS
1669 },
1670 .dec = {
1671 .vecs = des3_ede_dec_tv_template,
1672 .count = DES3_EDE_DEC_TEST_VECTORS
1673 }
1674 }
1675 }
1676 }, {
1677 .alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
1680 .cipher = {
1681 .enc = {
1682 .vecs = khazad_enc_tv_template,
1683 .count = KHAZAD_ENC_TEST_VECTORS
1684 },
1685 .dec = {
1686 .vecs = khazad_dec_tv_template,
1687 .count = KHAZAD_DEC_TEST_VECTORS
1688 }
1689 }
1690 }
1691 }, {
1692 .alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
1695 .cipher = {
1696 .enc = {
1697 .vecs = seed_enc_tv_template,
1698 .count = SEED_ENC_TEST_VECTORS
1699 },
1700 .dec = {
1701 .vecs = seed_dec_tv_template,
1702 .count = SEED_DEC_TEST_VECTORS
1703 }
1704 }
1705 }
1706 }, {
1707 .alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
1710 .cipher = {
1711 .enc = {
1712 .vecs = serpent_enc_tv_template,
1713 .count = SERPENT_ENC_TEST_VECTORS
1714 },
1715 .dec = {
1716 .vecs = serpent_dec_tv_template,
1717 .count = SERPENT_DEC_TEST_VECTORS
1718 }
1719 }
1720 }
1721 }, {
1722 .alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
1725 .cipher = {
1726 .enc = {
1727 .vecs = tea_enc_tv_template,
1728 .count = TEA_ENC_TEST_VECTORS
1729 },
1730 .dec = {
1731 .vecs = tea_dec_tv_template,
1732 .count = TEA_DEC_TEST_VECTORS
1733 }
1734 }
1735 }
1736 }, {
1737 .alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
1740 .cipher = {
1741 .enc = {
1742 .vecs = tnepres_enc_tv_template,
1743 .count = TNEPRES_ENC_TEST_VECTORS
1744 },
1745 .dec = {
1746 .vecs = tnepres_dec_tv_template,
1747 .count = TNEPRES_DEC_TEST_VECTORS
1748 }
1749 }
1750 }
1751 }, {
1752 .alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
1755 .cipher = {
1756 .enc = {
1757 .vecs = tf_enc_tv_template,
1758 .count = TF_ENC_TEST_VECTORS
1759 },
1760 .dec = {
1761 .vecs = tf_dec_tv_template,
1762 .count = TF_DEC_TEST_VECTORS
1763 }
1764 }
1765 }
1766 }, {
1767 .alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
1770 .cipher = {
1771 .enc = {
1772 .vecs = xeta_enc_tv_template,
1773 .count = XETA_ENC_TEST_VECTORS
1774 },
1775 .dec = {
1776 .vecs = xeta_dec_tv_template,
1777 .count = XETA_DEC_TEST_VECTORS
1778 }
1779 }
1780 }
1781 }, {
1782 .alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
1785 .cipher = {
1786 .enc = {
1787 .vecs = xtea_enc_tv_template,
1788 .count = XTEA_ENC_TEST_VECTORS
1789 },
1790 .dec = {
1791 .vecs = xtea_dec_tv_template,
1792 .count = XTEA_DEC_TEST_VECTORS
1793 }
1794 }
1795 }
1796 }, {
1797 .alg = "gcm(aes)",
1798 .test = alg_test_aead,
1799 .suite = {
1800 .aead = {
1801 .enc = {
1802 .vecs = aes_gcm_enc_tv_template,
1803 .count = AES_GCM_ENC_TEST_VECTORS
1804 },
1805 .dec = {
1806 .vecs = aes_gcm_dec_tv_template,
1807 .count = AES_GCM_DEC_TEST_VECTORS
1808 }
1809 }
1810 }
1811 }, {
1812 .alg = "hmac(md5)",
1813 .test = alg_test_hash,
1814 .suite = {
1815 .hash = {
1816 .vecs = hmac_md5_tv_template,
1817 .count = HMAC_MD5_TEST_VECTORS
1818 }
1819 }
1820 }, {
1821 .alg = "hmac(rmd128)",
1822 .test = alg_test_hash,
1823 .suite = {
1824 .hash = {
1825 .vecs = hmac_rmd128_tv_template,
1826 .count = HMAC_RMD128_TEST_VECTORS
1827 }
1828 }
1829 }, {
1830 .alg = "hmac(rmd160)",
1831 .test = alg_test_hash,
1832 .suite = {
1833 .hash = {
1834 .vecs = hmac_rmd160_tv_template,
1835 .count = HMAC_RMD160_TEST_VECTORS
1836 }
1837 }
1838 }, {
1839 .alg = "hmac(sha1)",
1840 .test = alg_test_hash,
1841 .suite = {
1842 .hash = {
1843 .vecs = hmac_sha1_tv_template,
1844 .count = HMAC_SHA1_TEST_VECTORS
1845 }
1846 }
1847 }, {
1848 .alg = "hmac(sha224)",
1849 .test = alg_test_hash,
1850 .suite = {
1851 .hash = {
1852 .vecs = hmac_sha224_tv_template,
1853 .count = HMAC_SHA224_TEST_VECTORS
1854 }
1855 }
1856 }, {
1857 .alg = "hmac(sha256)",
1858 .test = alg_test_hash,
1859 .suite = {
1860 .hash = {
1861 .vecs = hmac_sha256_tv_template,
1862 .count = HMAC_SHA256_TEST_VECTORS
1863 }
1864 }
1865 }, {
1866 .alg = "hmac(sha384)",
1867 .test = alg_test_hash,
1868 .suite = {
1869 .hash = {
1870 .vecs = hmac_sha384_tv_template,
1871 .count = HMAC_SHA384_TEST_VECTORS
1872 }
1873 }
1874 }, {
1875 .alg = "hmac(sha512)",
1876 .test = alg_test_hash,
1877 .suite = {
1878 .hash = {
1879 .vecs = hmac_sha512_tv_template,
1880 .count = HMAC_SHA512_TEST_VECTORS
1881 }
1882 }
1883 }, {
1884 .alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
1887 .cipher = {
1888 .enc = {
1889 .vecs = aes_lrw_enc_tv_template,
1890 .count = AES_LRW_ENC_TEST_VECTORS
1891 },
1892 .dec = {
1893 .vecs = aes_lrw_dec_tv_template,
1894 .count = AES_LRW_DEC_TEST_VECTORS
1895 }
1896 }
1897 }
1898 }, {
1899 .alg = "lzo",
1900 .test = alg_test_comp,
1901 .suite = {
1902 .comp = {
1903 .comp = {
1904 .vecs = lzo_comp_tv_template,
1905 .count = LZO_COMP_TEST_VECTORS
1906 },
1907 .decomp = {
1908 .vecs = lzo_decomp_tv_template,
1909 .count = LZO_DECOMP_TEST_VECTORS
1910 }
1911 }
1912 }
1913 }, {
1914 .alg = "md4",
1915 .test = alg_test_hash,
1916 .suite = {
1917 .hash = {
1918 .vecs = md4_tv_template,
1919 .count = MD4_TEST_VECTORS
1920 }
1921 }
1922 }, {
1923 .alg = "md5",
1924 .test = alg_test_hash,
1925 .suite = {
1926 .hash = {
1927 .vecs = md5_tv_template,
1928 .count = MD5_TEST_VECTORS
1929 }
1930 }
1931 }, {
1932 .alg = "michael_mic",
1933 .test = alg_test_hash,
1934 .suite = {
1935 .hash = {
1936 .vecs = michael_mic_tv_template,
1937 .count = MICHAEL_MIC_TEST_VECTORS
1938 }
1939 }
1940 }, {
1941 .alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
1944 .cipher = {
1945 .enc = {
1946 .vecs = fcrypt_pcbc_enc_tv_template,
1947 .count = FCRYPT_ENC_TEST_VECTORS
1948 },
1949 .dec = {
1950 .vecs = fcrypt_pcbc_dec_tv_template,
1951 .count = FCRYPT_DEC_TEST_VECTORS
1952 }
1953 }
1954 }
1955 }, {
1956 .alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.suite = {
1959 .cipher = {
1960 .enc = {
1961 .vecs = aes_ctr_enc_tv_template,
1962 .count = AES_CTR_ENC_TEST_VECTORS
1963 },
1964 .dec = {
1965 .vecs = aes_ctr_dec_tv_template,
1966 .count = AES_CTR_DEC_TEST_VECTORS
1967 }
1968 }
1969 }
1970 }, {
		.alg = "rfc4309(ccm(aes))",
1972 .test = alg_test_aead,
1973 .suite = {
1974 .aead = {
1975 .enc = {
1976 .vecs = aes_ccm_rfc4309_enc_tv_template,
1977 .count = AES_CCM_4309_ENC_TEST_VECTORS
1978 },
1979 .dec = {
1980 .vecs = aes_ccm_rfc4309_dec_tv_template,
1981 .count = AES_CCM_4309_DEC_TEST_VECTORS
1982 }
1983 }
1984 }
1985 }, {
		.alg = "rmd128",
1987 .test = alg_test_hash,
1988 .suite = {
1989 .hash = {
1990 .vecs = rmd128_tv_template,
1991 .count = RMD128_TEST_VECTORS
1992 }
1993 }
1994 }, {
1995 .alg = "rmd160",
1996 .test = alg_test_hash,
1997 .suite = {
1998 .hash = {
1999 .vecs = rmd160_tv_template,
2000 .count = RMD160_TEST_VECTORS
2001 }
2002 }
2003 }, {
2004 .alg = "rmd256",
2005 .test = alg_test_hash,
2006 .suite = {
2007 .hash = {
2008 .vecs = rmd256_tv_template,
2009 .count = RMD256_TEST_VECTORS
2010 }
2011 }
2012 }, {
2013 .alg = "rmd320",
2014 .test = alg_test_hash,
2015 .suite = {
2016 .hash = {
2017 .vecs = rmd320_tv_template,
2018 .count = RMD320_TEST_VECTORS
2019 }
2020 }
2021 }, {
2022 .alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
2025 .cipher = {
2026 .enc = {
2027 .vecs = salsa20_stream_enc_tv_template,
2028 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2029 }
2030 }
2031 }
2032 }, {
2033 .alg = "sha1",
2034 .test = alg_test_hash,
2035 .suite = {
2036 .hash = {
2037 .vecs = sha1_tv_template,
2038 .count = SHA1_TEST_VECTORS
2039 }
2040 }
2041 }, {
2042 .alg = "sha224",
2043 .test = alg_test_hash,
2044 .suite = {
2045 .hash = {
2046 .vecs = sha224_tv_template,
2047 .count = SHA224_TEST_VECTORS
2048 }
2049 }
2050 }, {
2051 .alg = "sha256",
2052 .test = alg_test_hash,
2053 .suite = {
2054 .hash = {
2055 .vecs = sha256_tv_template,
2056 .count = SHA256_TEST_VECTORS
2057 }
2058 }
2059 }, {
2060 .alg = "sha384",
2061 .test = alg_test_hash,
2062 .suite = {
2063 .hash = {
2064 .vecs = sha384_tv_template,
2065 .count = SHA384_TEST_VECTORS
2066 }
2067 }
2068 }, {
2069 .alg = "sha512",
2070 .test = alg_test_hash,
2071 .suite = {
2072 .hash = {
2073 .vecs = sha512_tv_template,
2074 .count = SHA512_TEST_VECTORS
2075 }
2076 }
2077 }, {
2078 .alg = "tgr128",
2079 .test = alg_test_hash,
2080 .suite = {
2081 .hash = {
2082 .vecs = tgr128_tv_template,
2083 .count = TGR128_TEST_VECTORS
2084 }
2085 }
2086 }, {
2087 .alg = "tgr160",
2088 .test = alg_test_hash,
2089 .suite = {
2090 .hash = {
2091 .vecs = tgr160_tv_template,
2092 .count = TGR160_TEST_VECTORS
2093 }
2094 }
2095 }, {
2096 .alg = "tgr192",
2097 .test = alg_test_hash,
2098 .suite = {
2099 .hash = {
2100 .vecs = tgr192_tv_template,
2101 .count = TGR192_TEST_VECTORS
2102 }
2103 }
2104 }, {
2105 .alg = "wp256",
2106 .test = alg_test_hash,
2107 .suite = {
2108 .hash = {
2109 .vecs = wp256_tv_template,
2110 .count = WP256_TEST_VECTORS
2111 }
2112 }
2113 }, {
2114 .alg = "wp384",
2115 .test = alg_test_hash,
2116 .suite = {
2117 .hash = {
2118 .vecs = wp384_tv_template,
2119 .count = WP384_TEST_VECTORS
2120 }
2121 }
2122 }, {
2123 .alg = "wp512",
2124 .test = alg_test_hash,
2125 .suite = {
2126 .hash = {
2127 .vecs = wp512_tv_template,
2128 .count = WP512_TEST_VECTORS
2129 }
2130 }
2131 }, {
2132 .alg = "xcbc(aes)",
2133 .test = alg_test_hash,
2134 .suite = {
2135 .hash = {
2136 .vecs = aes_xcbc128_tv_template,
2137 .count = XCBC_AES_TEST_VECTORS
2138 }
2139 }
2140 }, {
2141 .alg = "xts(aes)",
		.test = alg_test_skcipher,
		.suite = {
2144 .cipher = {
2145 .enc = {
2146 .vecs = aes_xts_enc_tv_template,
2147 .count = AES_XTS_ENC_TEST_VECTORS
2148 },
2149 .dec = {
2150 .vecs = aes_xts_dec_tv_template,
2151 .count = AES_XTS_DEC_TEST_VECTORS
2152 }
2153 }
2154 }
	}, {
2156 .alg = "zlib",
2157 .test = alg_test_pcomp,
2158 .suite = {
2159 .pcomp = {
2160 .comp = {
2161 .vecs = zlib_comp_tv_template,
2162 .count = ZLIB_COMP_TEST_VECTORS
2163 },
2164 .decomp = {
2165 .vecs = zlib_decomp_tv_template,
2166 .count = ZLIB_DECOMP_TEST_VECTORS
2167 }
2168 }
2169 }
	}
};
2172
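/*
 * Binary search for "alg" in the sorted alg_test_descs[] table above.
 * Returns the index of the matching entry, or -1 if none exists.
 */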
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

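/*
 * Self-test entry point exported to the rest of the crypto layer.
 * "driver" names the implementation under test and "alg" the algorithm;
 * plain single-block ciphers are looked up via their "ecb(alg)" test
 * entry.  Returns 0 if no test exists or the test passed, a negative
 * errno on failure, and panics on failure when FIPS mode is enabled.
 *
 * Illustrative call (the driver name here is only an example):
 *
 *	err = alg_test("cbc(aes-generic)", "cbc(aes)", type, mask);
 */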
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		return alg_test_cipher(alg_test_descs + i, driver, type, mask);
	}

	i = alg_find_test(alg);
	if (i < 0)
		goto notest;

	rc = alg_test_descs[i].test(alg_test_descs + i, driver,
				    type, mask);
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
}
EXPORT_SYMBOL_GPL(alg_test);

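/*
 * Allocate one page for each slot of the data (xbuf) and associated
 * data (axbuf) scratch buffers used by the tests above.
 */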
int __init testmgr_init(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!xbuf[i])
			goto err_free_xbuf;
	}

	for (i = 0; i < XBUFSIZE; i++) {
		axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!axbuf[i])
			goto err_free_axbuf;
	}

	return 0;

err_free_axbuf:
	for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
		free_page((unsigned long)axbuf[i]);
err_free_xbuf:
	for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
		free_page((unsigned long)xbuf[i]);

	return -ENOMEM;
}

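/*
 * Release the scratch pages allocated by testmgr_init().
 */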
void testmgr_exit(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)axbuf[i]);
	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)xbuf[i]);
}