blob: 1e64f354c2b83b37dbf2ddbd8564af8d48e0c110 [file] [log] [blame]
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

/*
 * cryptd wrapper around an ablkcipher transform.  Contains only the base
 * transform, so a pointer to the wrapper and a pointer to the base
 * coincide (see __cryptd_ablkcipher_cast()).  Do not add members.
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};

/*
 * Cast a base ablkcipher transform to its cryptd wrapper.  Valid only
 * because struct cryptd_ablkcipher wraps the base transform as its sole
 * member, so both pointers refer to the same address.
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/* Return the child (wrapped) transform backing @tfm. */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/*
 * NOTE(review): the sibling *_queued helpers below are documented "Must be
 * called without moving CPUs" — presumably the same applies here; confirm.
 */
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);

/*
 * cryptd wrapper around an skcipher transform.  Contains only the base
 * transform so wrapper and base pointers coincide.  Do not add members.
 */
struct cryptd_skcipher {
	struct crypto_skcipher base;
};

/* alg_name should be the algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
/* Return the child (wrapped) transform backing @tfm. */
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);

/*
 * cryptd wrapper around an ahash transform.  Contains only the base
 * transform, so a pointer to the wrapper and a pointer to the base
 * coincide (see __cryptd_ahash_cast()).  Do not add members.
 */
struct cryptd_ahash {
	struct crypto_ahash base;
};

/*
 * Cast a base ahash transform to its cryptd wrapper.  Valid only because
 * struct cryptd_ahash wraps the base transform as its sole member, so
 * both pointers refer to the same address.
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/* Return the child (wrapped) shash transform backing @tfm. */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Return the shash descriptor associated with @req. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);

/*
 * cryptd wrapper around an AEAD transform.  Contains only the base
 * transform, so a pointer to the wrapper and a pointer to the base
 * coincide (see __cryptd_aead_cast()).  Do not add members.
 */
struct cryptd_aead {
	struct crypto_aead base;
};

/*
 * Cast a base AEAD transform to its cryptd wrapper.  Valid only because
 * struct cryptd_aead wraps the base transform as its sole member, so
 * both pointers refer to the same address.
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

/* alg_name should be the algorithm to be cryptd-ed */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

/* Return the child (wrapped) transform backing @tfm. */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);


#endif