/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
11
#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

20struct cryptd_ablkcipher {
21 struct crypto_ablkcipher base;
22};
23
/*
 * Cast a generic crypto_ablkcipher handle to its cryptd wrapper.
 * Only valid for tfms obtained from cryptd_alloc_ablkcipher().
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}
29
30/* alg_name should be algorithm to be cryptd-ed */
31struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
32 u32 type, u32 mask);
33struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
Herbert Xu81760ea2016-06-21 16:55:13 +080034bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
Huang Ying1cac2cb2009-01-18 16:19:46 +110035void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
36
Herbert Xu4e0958d2016-11-22 20:08:23 +080037struct cryptd_skcipher {
38 struct crypto_skcipher base;
39};
40
41struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
42 u32 type, u32 mask);
43struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
44/* Must be called without moving CPUs. */
45bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
46void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
47
Huang Yingace13662009-08-06 15:35:20 +100048struct cryptd_ahash {
49 struct crypto_ahash base;
50};
51
/*
 * Cast a generic crypto_ahash handle to its cryptd wrapper.
 * Only valid for tfms obtained from cryptd_alloc_ahash().
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}
57
58/* alg_name should be algorithm to be cryptd-ed */
59struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
60 u32 type, u32 mask);
61struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
Huang Ying0e1227d2009-10-19 11:53:06 +090062struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
Herbert Xu81760ea2016-06-21 16:55:13 +080063/* Must be called without moving CPUs. */
64bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
Huang Yingace13662009-08-06 15:35:20 +100065void cryptd_free_ahash(struct cryptd_ahash *tfm);
66
Adrian Hoban298c9262010-09-20 16:05:12 +080067struct cryptd_aead {
68 struct crypto_aead base;
69};
70
/*
 * Cast a generic crypto_aead handle to its cryptd wrapper.
 * Only valid for tfms obtained from cryptd_alloc_aead().
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}
76
77struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
78 u32 type, u32 mask);
79
80struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
Herbert Xu81760ea2016-06-21 16:55:13 +080081/* Must be called without moving CPUs. */
82bool cryptd_aead_queued(struct cryptd_aead *tfm);
Adrian Hoban298c9262010-09-20 16:05:12 +080083
84void cryptd_free_aead(struct cryptd_aead *tfm);
85
#endif