/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	/* The refcount is the first member of every cryptd context struct. */
	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

/*
 * Called in workqueue context; do one real crypto operation (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
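
/*
 * For example (illustrative only): wrapping a shash whose cra_driver_name
 * is "sha256-generic" yields an instance with the same cra_name, a
 * cra_driver_name of "cryptd(sha256-generic)", and a cra_priority 50
 * higher than the wrapped algorithm's.
 */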

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
		    CRYPTO_ALG_TYPE_BLKCIPHER)
			return cryptd_create_blkcipher(tmpl, tb, &queue);

		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}
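
/*
 * A minimal consumer sketch (assumptions: a "cbc(aes)" implementation is
 * available and the cryptd template is registered): requesting an algorithm
 * by its templated name instantiates it through cryptd_create(), e.g.
 *
 *	struct crypto_skcipher *tfm =
 *		crypto_alloc_skcipher("cryptd(cbc(aes))", 0, 0);
 *
 * which returns an async skcipher whose requests are run from kcrypto_wq.
 */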

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
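
/*
 * Illustrative caller sketch (driver name is hypothetical, not from this
 * file): a driver that registered an internal synchronous blkcipher, say
 * "__driver-cbc-aes-foo", would typically wrap it with
 *
 *	struct cryptd_ablkcipher *ctfm =
 *		cryptd_alloc_ablkcipher("__driver-cbc-aes-foo",
 *					CRYPTO_ALG_INTERNAL,
 *					CRYPTO_ALG_INTERNAL);
 *
 * and later release the reference with cryptd_free_ablkcipher().
 */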

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
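
/*
 * Illustrative caller sketch (driver name is hypothetical): SIMD-style
 * drivers usually register an internal skcipher and wrap it, e.g.
 *
 *	struct cryptd_skcipher *ctfm =
 *		cryptd_alloc_skcipher("__cbc-aes-foo",
 *				      CRYPTO_ALG_INTERNAL,
 *				      CRYPTO_ALG_INTERNAL);
 *
 * Requests are then forwarded to &ctfm->base, or handled directly via
 * cryptd_skcipher_child(ctfm) when the caller can run synchronously.
 */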

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
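
/*
 * Illustrative caller sketch (driver name is hypothetical): an accelerated
 * hash registered with CRYPTO_ALG_INTERNAL, say "__ghash-foo", is typically
 * wrapped with
 *
 *	struct cryptd_ahash *chash =
 *		cryptd_alloc_ahash("__ghash-foo",
 *				   CRYPTO_ALG_INTERNAL, CRYPTO_ALG_INTERNAL);
 *
 * cryptd_shash_desc() then exposes the wrapped shash descriptor of a queued
 * request, and cryptd_free_ahash() drops the reference.
 */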

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
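
/*
 * Illustrative caller sketch (driver name is hypothetical): an AEAD
 * registered as internal, say "__gcm-aes-foo", can be wrapped with
 *
 *	struct cryptd_aead *caead =
 *		cryptd_alloc_aead("__gcm-aes-foo",
 *				  CRYPTO_ALG_INTERNAL, CRYPTO_ALG_INTERNAL);
 *
 * cryptd_aead_queued() reports whether requests are still pending before
 * the wrapper is released with cryptd_free_aead().
 */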

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");