/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

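/*
 * Descriptive note (added): requests are queued on the CPU they are
 * submitted from; each possible CPU gets its own crypto_queue, drained
 * one request at a time by a work item running on kcrypto_wq.
 */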
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

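/*
 * Descriptive note (added): queue a request on the submitting CPU's queue
 * and kick its work item.  While requests are pending, the tfm context's
 * refcnt is held above its base value so the transform cannot be freed
 * out from under the worker.
 */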
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

/*
 * Called in workqueue context: do one real encryption/decryption
 * (via req->complete) and reschedule itself if there is more work
 * to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

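/*
 * Descriptive note (added): worker-side crypt path; run the synchronous
 * child cipher through an on-stack sub-request, then complete the original
 * asynchronous request.
 */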
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

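/*
 * Descriptive note (added): template entry point for "cryptd(...)";
 * dispatch on the type of the wrapped algorithm and build the matching
 * asynchronous instance.
 */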
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
		    CRYPTO_ALG_TYPE_BLKCIPHER)
			return cryptd_create_blkcipher(tmpl, tb, &queue);

		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

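/*
 * Illustrative usage sketch (added, not part of the original file): a
 * caller wanting an asynchronous instance of an existing cipher, e.g.
 * "cbc(aes)", would do roughly:
 *
 *	struct cryptd_skcipher *tfm;
 *
 *	tfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	cryptd_free_skcipher(tfm);
 */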
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");